aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa

0

let
  Quelle = DateTime.FixedLocalNow(), 
  #"In Tabelle konvertiert" = #table ( 1, { { Quelle } } ), 

  #"Umbenannte Spalten" = Table.RenameColumns (
    #"In Tabelle konvertiert", 
    { { "Column1", "LastUpdateTS" } }

  )
, 

  #"Geänderter Typ" = Table.TransformColumnTypes (
    #"Umbenannte Spalten", 
    { { "LastUpdateTS", type datetime } }

  )
, 

  #"Hinzugefügte benutzerdefinierte Spalte" = Table.AddColumn (
    #"Geänderter Typ", 
    "LastUpdateDate", 

    each DateTime.Date ( [LastUpdateTS] )

  )
, 

  #"Geänderter Typ1" = Table.TransformColumnTypes (
    #"Hinzugefügte benutzerdefinierte Spalte", 
    { { "LastUpdateDate", type date } }

  )

in

  #"Geänderter Typ1"

1

let
  Quelle = Access.Database (
    File.Contents ( "C:\Users\hg\Desktop\AWC_2014.accdb" ), 
    [ CreateNavigationProperties = true ]

  )
, 

  _Sales_SalesOrderHeader = Quelle{[ Schema = "", Item = "Sales_SalesOrderHeader" ]}[Data], 

  #"Andere entfernte Spalten" = Table.SelectColumns (
    _Sales_SalesOrderHeader, 
    { "SalesOrderID", "OrderDate", "CustomerID", "SubTotal", "TotalDue" }

  )

in

  #"Andere entfernte Spalten"

2

// Minimal two-parameter function: x is untyped (any), y must be text.
// Always returns the constant string "nothing" regardless of its inputs.
let
  MyFunc = ( x, y as text ) => "nothing"

in

  MyFunc

3

let
  Quelle = DateTime.FixedLocalNow(), 
  #"Result" = #table (
    type table [ #"Update TS" = datetime, #"Update Date" = date, #"Update Time" = time ], 
    { { Quelle, DateTime.Date ( Quelle ), DateTime.Time ( Quelle ) } }

  )

in

  #"Result"

4

let
  Quelle = List.Dates ( #date ( 2018, 1, 1 ), 365, #duration ( 1, 0, 0, 0 ) )

in

  Quelle

5

// Computes the date of (western) Easter Sunday for the given year using a
// Gauss-style computus. The constants M and N are only valid for 2000-2099
// (per the original author's comments) — other centuries need other constants.
( year as number ) =>
  let
    M      = 24,  //constant for the years 2000-2099
    N      = 5,  //constant for the years 2000-2099

    a      = Number.Mod ( year, 4 ), 

    b      = Number.Mod ( year, 7 ), 

    c      = Number.Mod ( year, 19 ), 

    d      = Number.Mod ( 19 * c + M, 30 ), 

    e      = Number.Mod ( 2 * a + 4 * b + 6 * d + N, 7 ), 

    f      = Number.IntegerDivide ( c + 11 * d + 22 * e, 451 ), 

    // Provisional day-of-March; values > 31 roll over into April below.
    res1   = 22 + d + e - 7 * f, 

    easter = if res1 > 31 then #date ( year, 4, ( d + e - 7 * f - 9 ) ) else #date ( year, 3, res1 )

  in

    easter as date

6

result
  =
 List.Generate (
    () => [ PageNumber = 0, Func = null ], 
    each ( try _[Func] )[HasError] = false, 

    each [ PageNumber = _[PageNumber] + 1, Func = fnNbaPlayerForPage ( [PageNumber] + 1 ) ], 

    each _[Func]

  )

7

let
  Quelle = Table.FromRows (
    Json.Document (
      Binary.Decompress (
        Binary.FromText (
          "bdJbCoQwDIXhrQw+D5Kc2ttsRdz/NsbaPJyagC+mfBB+cp4bRMv23VB3Sfv4+Qh+Ivd3T5PsKq/p9X1QHagYqozQDVWH2njOhtqCmqHmUB+LqKG+oGqovxFskQdBFpQn4qkhnYtMpEsINaQOgeoBUT2eGkpUDymqx1NDB4XAEYXgqaE8F5koL6gYyg4VrlfCeu4iULleDeu5i0DjEC0M4S4Cnc4IPTojuItIQvWSRPV4el1/", 
          BinaryEncoding.Base64

        )
, 

        Compression.Deflate

      )

    )
, 

    let
      _t = ( ( type text ) meta [ Serialized.Text = true ] )

    in

      type table [ Jahr = _t, StartSommerzeit = _t, StartWinterzeit = _t ]

  )
, 

  #"Geänderter Typ" = Table.TransformColumnTypes (
    Quelle, 
    {
      { "Jahr", Int64.Type }, 
      { "StartSommerzeit", type datetime }, 

      { "StartWinterzeit", type datetime }

    }

  )

in

  #"Geänderter Typ"

8

let
  Quelle = ""

in

  Quelle

9

let
  MyFunc = ( x, y as nullable text ) => "nothing"

in

  MyFunc

10

let
  Quelle = List.Dates ( #date ( 2018, 1, 1 ), 365, #duration ( 1, 0, 0, 0 ) ), 
  #"In Tabelle konvertiert" = Table.FromList (
    Quelle, 
    Splitter.SplitByNothing(), 

    null, 

    null, 

    ExtraValues.Error

  )
, 

  #"Umbenannte Spalten" = Table.RenameColumns (
    #"In Tabelle konvertiert", 
    { { "Column1", "Datum" } }

  )
, 

  #"Geänderter Typ" = Table.TransformColumnTypes (
    #"Umbenannte Spalten", 
    { { "Datum", type date } }

  )
, 

  #"Add Jahr" = Table.AddColumn ( #"Geänderter Typ", "Jahr", each Date.Year ( [Datum] ) ), 

  #"Add Monat" = Table.AddColumn ( #"Add Jahr", "Monat", each Date.Month ( [Datum] ) ), 

  #"Add Monatsname" = Table.AddColumn (
    #"Add Monat", 
    "Monatsname", 

    each Date.ToText ( [Datum], "MMMM" )

  )
, 

  #"Add MonatsnameKurz" = Table.AddColumn (
    #"Add Monatsname", 
    "MonatsnameKurz", 

    each Date.ToText ( [Datum], "MMM" )

  )
, 

  #"Add YYYYMM" = Table.AddColumn (
    #"Add MonatsnameKurz", 
    "YYYYMM", 

    each Number.ToText ( [Jahr] ) & Date.ToText ( [Datum], "MM" )

  )
, 

  #"Add YYYY/MMMM" = Table.AddColumn (
    #"Add YYYYMM", 
    "YYYY/MMMM", 

    each Number.ToText ( [Jahr] ) & "/" & [Monatsname]

  )
, 

  #"Add YYYY/MMM" = Table.AddColumn (
    #"Add YYYY/MMMM", 
    "YYYY/MMM", 

    each Number.ToText ( [Jahr] ) & "/" & [MonatsnameKurz]

  )
, 

  #"Add Quartal" = Table.AddColumn (
    #"Add YYYY/MMM", 
    "Quartal", 

    each Date.QuarterOfYear ( [Datum] )

  )
, 

  #"Add Quartalname" = Table.AddColumn (
    #"Add Quartal", 
    "Quartalname", 

    each "Q" & Number.ToText ( [Quartal] )

  )
, 

  #"Add YYYYQ" = Table.AddColumn (
    #"Add Quartalname", 
    "YYYYQ", 

    each Number.ToText ( [Jahr] ) & Number.ToText ( [Quartal] )

  )
, 

  #"Add YYYY/Q" = Table.AddColumn (
    #"Add YYYYQ", 
    "YYYY/Q", 

    each Number.ToText ( [Jahr] ) & "/" & [Quartalname]

  )
, 

  #"Add Kalenderwoche" = Table.AddColumn (
    #"Add YYYY/Q", 
    "Kalenderwoche", 

    each Date.WeekOfYear ( [Datum], Day.Monday )

  )
, 

  #"Add Wochentag" = Table.AddColumn (
    #"Add Kalenderwoche", 
    "Wochentag", 

    each Date.DayOfWeek ( [Datum], Day.Monday )

  )
, 

  #"Add WochentagName" = Table.AddColumn (
    #"Add Wochentag", 
    "WochentagName", 

    each Date.DayOfWeekName ( [Datum] )

  )
, 

  #"Add WochentagNameKurz" = Table.AddColumn (
    #"Add WochentagName", 
    "WochentagNameKurz", 

    each Date.ToText ( [Datum], "ddd" )

  )
, 

  #"Geänderter Typ1" = Table.TransformColumnTypes (
    #"Add WochentagNameKurz", 
    {
      { "Jahr", Int64.Type }, 
      { "Monat", Int64.Type }, 

      { "Monatsname", type text }, 

      { "MonatsnameKurz", type text }, 

      { "YYYYMM", Int64.Type }, 

      { "YYYY/MMMM", type text }, 

      { "YYYY/MMM", type text }, 

      { "Quartal", Int64.Type }, 

      { "Quartalname", type text }, 

      { "YYYYQ", Int64.Type }, 

      { "YYYY/Q", type text }, 

      { "Kalenderwoche", Int64.Type }, 

      { "Wochentag", Int64.Type }, 

      { "WochentagName", type text }, 

      { "WochentagNameKurz", type text }

    }

  )

in

  #"Geänderter Typ1"

11

( fromYear as number, toYear as number ) =>
  let
    getEasterDate = ( year as number ) =>
      let
        M = 24, 
        N = 5, 

        a = Number.Mod ( year, 4 ), 

        b = Number.Mod ( year, 7 ), 

        c = Number.Mod ( year, 19 ), 

        d = Number.Mod ( 19 * c + M, 30 ), 

        e = Number.Mod ( 2 * a + 4 * b + 6 * d + N, 7 ), 

        f = Number.IntegerDivide ( c + 11 * d + 22 * e, 451 ), 

        res1 = 22 + d + e - 7 * f, 

        easter = 
          if
 res1 > 31 then
            #date ( year, 4, ( d + e - 7 * f - 9 ) )

          else

            #date ( year, 3, res1 )

      in

        easter as date
, 

    getFeiertage4Year = ( year as number ) =>
      let
        easterDate = getEasterDate ( year ), 
        resTable = #table (
          { "Datum", "Feiertag" }, 
          {
            { #date ( year, 1, 1 ), "Neujahr" }, 
            { Date.AddDays ( easterDate, - 2 ), "Karfreitag" }, 

            { easterDate, "Ostern" }, 

            { Date.AddDays ( easterDate, 1 ), "Ostermontag" }, 

            { Date.AddDays ( easterDate, 39 ), "Christi Himmelfahrt" }, 

            { Date.AddDays ( easterDate, 49 ), "Pfingsten" }, 

            { Date.AddDays ( easterDate, 50 ), "Pfingstmontag" }, 

            { #date ( year, 5, 1 ), "Tag der Arbeit" }, 

            { #date ( year, 10, 3 ), "Tag der deutschen Einheit" }, 

            { #date ( year, 12, 25 ), "erster Weihnachtsfeiertag" }, 

            { #date ( year, 12, 26 ), "zweiter Weihnachtsfeiertag" }

          }

        )

      in

        resTable
, 

    resultList = List.Generate (
      () => [ currentYear = fromYear ], 
      ( x ) => x[currentYear] <= toYear, 

      ( x ) => [ currentYear = x[currentYear] + 1 ], 

      ( x ) => getFeiertage4Year ( x[currentYear] )

    )
, 

    convertToTable = Table.FromList (
      resultList, 
      Splitter.SplitByNothing(), 

      null, 

      null, 

      ExtraValues.Error

    )
, 

    expandCols = Table.ExpandTableColumn (
      convertToTable, 
      "Column1", 

      { "Datum", "Feiertag" }, 

      { "Datum", "Feiertag" }

    )
, 

    resultTable = Table.TransformColumnTypes (
      expandCols, 
      { { "Datum", type date }, { "Feiertag", type text } }

    )

  in

    resultTable

12

result = Table.Skip ( KonvertierteTabelle, 1 )

13

( utcTS as datetime ) =>
  let
    mezTS = utcTS + #duration ( 0, 1, 0, 0 ),  // in Winterzeit umrechnen
    year = Date.Year ( DateTime.Date ( mezTS ) ), 

    row = Table.SelectRows ( SommerWinterZeit, ( curRow ) => curRow[Jahr] = year ), 

    startOfSummertime = 
      if
 Table.RowCount ( row ) = 1 then
        Table.First ( row )[StartSommerzeit]

      else

        null
, 

    endOfSummertime = 
      if
 Table.RowCount ( row ) = 1 then
        Table.First ( row )[StartWinterzeit]

      else

        null
, 

    deltaHour = 
      if
 startOfSummertime
        <>
 null

        and
 endOfSummertime <> null

        and
 mezTS >= startOfSummertime

        and
 mezTS < endOfSummertime

      then

        1

      else

        0
, 

    localTS = mezTS + #duration ( 0, deltaHour, 0, 0 )

  in

    localTS

14

// Adds two constant numbers and returns their sum (evaluates to 2).
let
  summandA = 1, 
  summandB = 1, 

  summe    = summandA + summandB

in

  summe

15

let
  Start = #date ( 2010, 1, 1 ), 
  Ende = #date ( 2020, 12, 31 ), 

  #"Anzahl Tage" = Duration.Days ( Ende - Start ) + 1, 

  Quelle = List.Dates ( Start, #"Anzahl Tage", #duration ( 1, 0, 0, 0 ) ), 

  #"In Tabelle konvertiert" = Table.FromList (
    Quelle, 
    Splitter.SplitByNothing(), 

    null, 

    null, 

    ExtraValues.Error

  )
, 

  #"Umbenannte Spalten" = Table.RenameColumns (
    #"In Tabelle konvertiert", 
    { { "Column1", "Datum" } }

  )
, 

  #"Geänderter Typ" = Table.TransformColumnTypes (
    #"Umbenannte Spalten", 
    { { "Datum", type date } }

  )
, 

  #"Add Jahr" = Table.AddColumn ( #"Geänderter Typ", "Jahr", each Date.Year ( [Datum] ) ), 

  #"Add Monat" = Table.AddColumn ( #"Add Jahr", "Monat", each Date.Month ( [Datum] ) ), 

  #"Sortierte Zeilen" = Table.Sort ( #"Add Monat", { { "Datum", Order.Descending } } )

in

  #"Sortierte Zeilen"

16

// Counts down from 10 to 1: seed 10, continue while _ >= 1, step subtracts 1,
// selector yields the current value — result is { 10, 9, ..., 1 }.
// NOTE(review): `result = ...` outside a let/section suggests this fragment is
// a section member elsewhere — confirm against the enclosing document.
result = List.Generate ( () => 10, each _ >= 1, each ( _ - 1 ), each _ )

17

let
  Quelle = Json.Document ( Web.Contents ( "http://worldtimeapi.org/api/timezone/Europe/Berlin" ) ), 
  #"In Tabelle konvertiert" = Record.ToTable ( Quelle ), 

  #"Gefilterte Zeilen" = Table.SelectRows (
    #"In Tabelle konvertiert", 
    each ( [Name] = "datetime" )

  )
, 

  #"Entfernte Spalten" = Table.RemoveColumns ( #"Gefilterte Zeilen", { "Name" } ), 

  #"Umbenannte Spalten" = Table.RenameColumns (
    #"Entfernte Spalten", 
    { { "Value", "LastUpdateTS" } }

  )
, 

  #"Geänderter Typ" = Table.TransformColumnTypes (
    #"Umbenannte Spalten", 
    { { "LastUpdateTS", type datetimezone } }

  )
, 

  #"Hinzugefügte benutzerdefinierte Spalte" = Table.AddColumn (
    #"Geänderter Typ", 
    "LastUpdateDate", 

    each DateTime.Date ( [LastUpdateTS] )

  )

in

  #"Hinzugefügte benutzerdefinierte Spalte"

18

let
  MyFunc = ( x, y ) => x + y

in

  MyFunc

19

( startDate as date, endDate as date ) =>
  let
    Start = startDate, 
    Ende = endDate, 

    #"Anzahl Tage" = Duration.Days ( Ende - Start ) + 1, 

    Quelle = List.Dates ( Start, #"Anzahl Tage", #duration ( 1, 0, 0, 0 ) ), 

    #"In Tabelle konvertiert" = Table.FromList (
      Quelle, 
      Splitter.SplitByNothing(), 

      null, 

      null, 

      ExtraValues.Error

    )
, 

    #"Umbenannte Spalten" = Table.RenameColumns (
      #"In Tabelle konvertiert", 
      { { "Column1", "Datum" } }

    )
, 

    #"Geänderter Typ" = Table.TransformColumnTypes (
      #"Umbenannte Spalten", 
      { { "Datum", type date } }

    )
, 

    #"Add Jahr" = Table.AddColumn ( #"Geänderter Typ", "Jahr", each Date.Year ( [Datum] ) ), 

    #"Add Monat" = Table.AddColumn ( #"Add Jahr", "Monat", each Date.Month ( [Datum] ) ), 

    #"Sortierte Zeilen" = Table.Sort ( #"Add Monat", { { "Datum", Order.Descending } } )

  in

    #"Sortierte Zeilen"

20

result = List.Generate ( () => 10, ( _ ) => _ >= 1, ( _ ) => ( _ - 1 ), ( _ ) => _ )

21

let
  Quelle = Abfrage2 ( 1, 2 )

in

  Quelle

22

result
  =
 List.Generate ( () => [ zahl = 10 ], each [zahl] >= 1, each [ zahl = [zahl] - 1 ], each [zahl] )

23

let
  MyFunc = ( x as number, y as number ) => x + y

in

  MyFunc

24

// Zero-argument initializer producing the seed record for a List.Generate
// paging loop: paging starts at PageNumber 0 with no fetched value (Func = null).
() => [ PageNumber = 0, Func = null ]

25

// Returns MyFunc, a function that adds 19% German VAT (MwSt) to a net amount:
// MyFunc(100) = 119. The rate is hard-coded inside the inner let.
let
  MyFunc = ( netValue as number ) =>
    let
      MwSt          = 0.19, 
      valueAfterTax = netValue * ( 1 + MwSt )

    in

      valueAfterTax

in

  MyFunc

26

each ( try _[Func] )[HasError] = false

27

each [ PageNumber = _[PageNumber] + 1, Func = fnNbaPlayerForPage ( [PageNumber] + 1 ) ]

28

each _[Func]

29

// Builds a combined, FX-converted sales-detail table from four regional SYSPRO
// company databases (AU / EU / US / SI).
//
// External dependencies (queries/parameters defined elsewhere in this file's
// workbook): StartTableDates, SalSalesperson, CurrencyAUDBase, CurrencyEURBase,
// CurrencyUSDBase.
//
// Refactored: the original repeated the same ~60-line extract/clean pipeline
// four times (once per company) and the same join/expand pair three times
// (once per currency). Those are now the helpers LoadCompanySales, AddFxRate
// and AddConverted; the produced table is unchanged.
let
  // Columns retained from each company's dbo.ArTrnDetail table.
  KeptColumns = {
    "TrnYear", "TrnMonth", "Invoice", "InvoiceDate", "Branch", "Salesperson", 
    "Customer", "StockCode", "Area", "ProductClass", "Warehouse", "CustomerClass", 
    "QtyInvoiced", "NetSalesValue", "CostValue", "GlYear", "GlPeriod", 
    "PostCurrency", "PostConvRate"
  }, 

  // Loads one company's ArTrnDetail and applies the shared cleaning steps:
  // type InvoiceDate as date, filter by the StartTableDates parameter, keep
  // only KeptColumns, drop intercompany branches (those starting with "9")
  // and junk stock codes ("Pric…" placeholders and blank " " codes).
  LoadCompanySales = ( databaseName as text ) as table =>
    let
      Source = Sql.Database ( "sqlserver.xxxxxx.local", databaseName ), 
      ArTrnDetail = Source{[ Schema = "dbo", Item = "ArTrnDetail" ]}[Data], 
      DateTyped = Table.TransformColumnTypes ( ArTrnDetail, { { "InvoiceDate", type date } } ), 
      DateFiltered = Table.SelectRows ( DateTyped, each [InvoiceDate] >= StartTableDates ), 
      Selected = Table.SelectColumns ( DateFiltered, KeptColumns ), 
      NoICSales = Table.SelectRows ( Selected, each not Text.StartsWith ( [Branch], "9" ) ), 
      CleanStkCodes = Table.SelectRows (
        NoICSales, 
        each not Text.Contains ( [StockCode], "Pric" ) and [StockCode] <> " "
      )
    in
      CleanStkCodes, 

  // Combine the four regional extracts (AU, EU, US, SI — same databases and
  // order as the original query).
  CombineAll = Table.Combine (
    {
      LoadCompanySales ( "EncoreCompanyR" ),  // AU
      LoadCompanySales ( "SysproCompanyE" ),  // EU
      LoadCompanySales ( "SysproCompanyU" ),  // US
      LoadCompanySales ( "SysproCompanyS" )   // SI
    }
  ), 

  // Merge Salesperson data: resolve the salesperson code to a display name,
  // then drop the code column.
  LinkSalePersonNameTable = Table.NestedJoin (
    CombineAll, 
    { "Branch", "Salesperson" }, 
    SalSalesperson, 
    { "Branch", "Salesperson" }, 
    "SalSalesperson", 
    JoinKind.LeftOuter
  ), 

  ShowSalesPersonName = Table.ExpandTableColumn (
    LinkSalePersonNameTable, 
    "SalSalesperson", 
    { "Name" }, 
    { "SalespersonName" }
  ), 

  RemoveSalepersonCode = Table.RemoveColumns ( ShowSalesPersonName, { "Salesperson" } ), 

  // GlYear must temporarily be text so it can join against the currency
  // tables' SYSPROYear key; it is typed back to number in ChangedType below.
  TempChgGlYearToText = Table.TransformColumnTypes (
    RemoveSalepersonCode, 
    { { "GlYear", type text } }
  ), 

  // Left-joins one currency table on Branch/GlYear = CompanyCode/SYSPROYear
  // and exposes its single rate column under its original name.
  AddFxRate = ( tbl as table, fxTable as table, joinName as text, fxColumn as text ) as table =>
    let
      Joined = Table.NestedJoin (
        tbl, 
        { "Branch", "GlYear" }, 
        fxTable, 
        { "CompanyCode", "SYSPROYear" }, 
        joinName, 
        JoinKind.LeftOuter
      ), 
      Expanded = Table.ExpandTableColumn ( Joined, joinName, { fxColumn }, { fxColumn } )
    in
      Expanded, 

  // Attach all three FX rates (AUD, EUR, USD), in the original merge order.
  WithAUDFx = AddFxRate ( TempChgGlYearToText, CurrencyAUDBase, "CurrencyAUDBase", "AUDFx" ), 
  WithEURFx = AddFxRate ( WithAUDFx, CurrencyEURBase, "CurrencyEURBase", "EURFx" ), 
  WithUSDFx = AddFxRate ( WithEURFx, CurrencyUSDBase, "CurrencyUSDBase", "USDFx" ), 

  // Adds a column that divides sourceColumn by the given FX-rate column,
  // rounded to 2 decimal places (same formula as the original six AddColumn
  // steps).
  AddConverted = ( tbl as table, newColumn as text, sourceColumn as text, fxColumn as text ) as table =>
    Table.AddColumn (
      tbl, 
      newColumn, 
      each Number.Round ( Record.Field ( _, sourceColumn ) / Record.Field ( _, fxColumn ), 2 )
    ), 

  // Create FEx values from Base Ccy values.
  CreateNetSaleAUD = AddConverted ( WithUSDFx, "NetSaleAUD", "NetSalesValue", "AUDFx" ), 
  CreateNetSaleEUR = AddConverted ( CreateNetSaleAUD, "NetSaleEUR", "NetSalesValue", "EURFx" ), 
  CreateNetSaleUSD = AddConverted ( CreateNetSaleEUR, "NetSaleUSD", "NetSalesValue", "USDFx" ), 
  CreateNetCostAUD = AddConverted ( CreateNetSaleUSD, "NetCostAUD", "CostValue", "AUDFx" ), 
  CreateNetCostEUR = AddConverted ( CreateNetCostAUD, "NetCostEUR", "CostValue", "EURFx" ), 
  CreateNetCostUSD = AddConverted ( CreateNetCostEUR, "NetCostUSD", "CostValue", "USDFx" ), 

  ChangedType = Table.TransformColumnTypes (
    CreateNetCostUSD, 
    {
      { "NetSaleAUD", type number }, 
      { "NetSaleEUR", type number }, 
      { "NetSaleUSD", type number }, 
      { "NetCostAUD", type number }, 
      { "NetCostEUR", type number }, 
      { "NetCostUSD", type number }, 
      { "GlYear", type number }
    }
  ), 

  // Clean table of redundant columns (raw currency info and the rates).
  FinalSalesDetails = Table.RemoveColumns (
    ChangedType, 
    { "PostCurrency", "PostConvRate", "AUDFx", "EURFx", "USDFx" }
  )

in

  FinalSalesDetails

30

let
  Source = Excel.Workbook (
    File.Contents ( "H:\Produkte\PowerBI_Training\Datenquellen\Datumstabelle_mit_445_Periode.xlsx" ), 
    null, 

    true

  )
, 

  Sheet = Source{[ Item = "Kalender", Kind = "Sheet" ]}[Data], 

  #"Promoted Headers" = Table.PromoteHeaders ( Sheet ), 

  #"Renamed Columns" = Table.RenameColumns (
    #"Promoted Headers", 
    {
      { "DateKey", "DateKey" }, 
      { "Year", "Year" }, 

      { "Quarter", "Quarter" }, 

      { "QuarterAndYear", "QuarterAndYear" }, 

      { "Quartername", "Quartername" }, 

      { "Month", "Month" }, 

      { "MonthAndYear", "MonthAndYear" }, 

      { "MonthName", "MonthName" }, 

      { "MonthNameAndYear", "MonthNameAndYear" }, 

      { "DayOfYear", "DayOfYear" }, 

      { "DayOfMonth", "DayOfMonth" }, 

      { "Weekday", "Weekday" }, 

      { "WeekdayName", "WeekdayName" }, 

      { "WeekdayShortName", "WeekdayShortName" }, 

      { "Periode 4-4-5", "Periode 4-4-5" }, 

      { "Name Periode 4-4-5", "Name Periode 4-4-5" }

    }

  )
, 

  #"Changed Type" = Table.TransformColumnTypes (
    #"Renamed Columns", 
    {
      { "DateKey", type datetime }, 
      { "Year", Int64.Type }, 

      { "Quarter", type number }, 

      { "QuarterAndYear", type text }, 

      { "Quartername", type text }, 

      { "Month", type number }, 

      { "MonthAndYear", type text }, 

      { "MonthName", type text }, 

      { "MonthNameAndYear", type text }, 

      { "DayOfYear", type number }, 

      { "DayOfMonth", type number }, 

      { "Weekday", type number }, 

      { "WeekdayName", type text }, 

      { "WeekdayShortName", type text }, 

      { "Periode 4-4-5", type number }, 

      { "Name Periode 4-4-5", type text }

    }

  )
, 

  #"Gefilterte Zeilen" = Table.SelectRows (
    #"Changed Type", 
    each [Year] >= 2011 and [Year] <= 2015

  )
, 

  #"Geänderter Typ" = Table.TransformColumnTypes (
    #"Gefilterte Zeilen", 
    { { "Quarter", Int64.Type } }

  )

in

  #"Geänderter Typ"

31

let
  Quelle = PostgreSQL.Database ( "localhost", "postgres" ), 
  banken = Text.Split ( GetParameter ( "SampleDev", "Bank_Select" ), "," ), 

  tables = Table.SelectRows (
    Quelle, 
    (
      ( t ) =>
        t[Schema]
          =
 "ru_output"

          and
 Text.EndsWith ( t[Name], "_default_info_kde" )

          and
 List.Contains ( banken, Text.TrimStart ( Text.Range ( t[Item], 1, 3 ), "0" ) )

    )

  )
, 

  colNames = Table.ColumnNames ( List.First ( tables[Data] ) ), 

  end = Date.EndOfMonth ( DateTime.Date ( GetParameter ( "SampleDev", "AnalysisKeydate" ) ) ), 

  start = Date.EndOfMonth ( Date.AddMonths ( end, - 12 - 13 ) ), 

  allPivotedColumnNames = List.Select (
    colNames, 
    ( x ) =>
      Text.StartsWith ( x, "def_rd_20" )
        or
 Text.StartsWith ( x, "ausfall_20" )

        or
 Text.StartsWith ( x, "ezko_obligo_20" )

  )
, 

  pivotedColumnNames = List.Select (
    allPivotedColumnNames, 
    ( cn ) =>  (
      let
        startOfMonthString = Text.PositionOf ( cn, "_20" ) + 1, 
        colDate = Date.EndOfMonth (
          #date (
            Number.FromText ( Text.Range ( cn, startOfMonthString, 4 ) ), 
            Number.FromText ( Text.Range ( cn, startOfMonthString + 5 ) ), 

            1

          )

        )
, 

        isRelevant = colDate <= end and colDate > start

      in

        isRelevant

    )

  )
, 

  relevantColumns = List.Combine (
    {
      {
        "bank", 
        "kundnr", 

        "nacebran", 

        "swrechtf", 

        "kdsegbasel", 

        "kundart", 

        "filiale_n", 

        "ru_nummer_auto", 

        "ru_name", 

        "ist_prio_ru_kundnr", 

        "kundenkeys_ru", 

        "rat_segment_ru_agg", 

        "rat_segment_kd_agg", 

        "seg_misch"

      }
, 

      pivotedColumnNames

    }

  )
, 

  relevantData = Table.ExpandTableColumn ( tables, "Data", relevantColumns ), 

  onlyUsedColumns = Table.SelectColumns ( relevantData, relevantColumns ), 

  combinedColumnData = List.Generate (
    () =>  [
      ausfallCols = List.Select ( pivotedColumnNames, each Text.StartsWith ( _, "ausfall" ) ), 
      table = onlyUsedColumns, 

      generatedCols = {}, 

      count = List.Count (
        List.Select ( pivotedColumnNames, each Text.StartsWith ( _, "ausfall" ) )

      )

    ]
, 

    ( cur ) => cur[count] >= 0, 

    ( cur ) =>
      let
        ausfallCol = List.First ( cur[ausfallCols] ), 
        curColMonth = Text.Replace ( ausfallCol, "ausfall_", "" ), 

        defrdCol = "def_rd_" & curColMonth, 

        ezkoCol = "ezko_obligo_" & curColMonth, 

        result = [
          ausfallCols = List.RemoveFirstN ( cur[ausfallCols] ), 
          count = cur[count] - 1, 

          generatedCols = List.Combine ( { cur[generatedCols], { curColMonth } } ), 

          table = 
            let

              added = Table.AddColumn (
                cur[table], 
                curColMonth, 

                ( row ) =>
                  Number.ToText ( Record.Field ( row, ausfallCol ) )
                    &
 "++"

                    &
 Number.ToText ( Record.Field ( row, defrdCol ) )

                    &
 "++"

                    &
 Record.Field ( row, ezkoCol )

              )
, 

              removed = Table.RemoveColumns ( added, { ausfallCol, defrdCol, ezkoCol } )

            in

              removed

        ]

      in

        result

  )
, 

  combinedColumnsTable = List.Last ( combinedColumnData )[table], 

  combinedPivotedColumns = List.Last ( combinedColumnData )[generatedCols], 

  unpivoted = Table.Unpivot ( combinedColumnsTable, combinedPivotedColumns, "Monat", "PivotValue" ), 

  dividedCol = Table.SplitColumn (
    unpivoted, 
    "PivotValue", 

    Splitter.SplitTextByEachDelimiter ( { "++", "++" }, QuoteStyle.Csv, false ), 

    { "ausfall", "IsDefault", "ezko_obligo" }

  )
, 

  mitStichtag = Table.AddColumn (
    dividedCol, 
    "Stichtag", 

    each Date.EndOfMonth (
      #date (
        Number.FromText ( Text.Range ( [Monat], 0, 4 ) ), 
        Number.FromText ( Text.Range ( [Monat], 5 ) ), 

        1

      )

    )

  )
, 

  removedColumns = Table.RemoveColumns ( mitStichtag, { "Monat" } ), 

  changedTypes = Table.TransformColumnTypes (
    removedColumns, 
    { { "IsDefault", type number }, { "ausfall", type number }, { "ezko_obligo", type number } }

  )
, 

  defaultFilter = Table.SelectRows ( changedTypes, each [IsDefault] = 0 or [IsDefault] = 1 )

in

  defaultFilter

32

each (
  try
    _[Func]

  otherwise
    422222222222222222222222222222222222222222222222222222222222222222222222222222

)
[HasError]

  =
 false

33

// Builds a one-row table containing the current fixed-local timestamp split
// into datetime / date / time columns, plus a second time column "Test".
//
// BUG FIX: the declared table type has FOUR columns but the data row supplied
// only THREE values, so evaluating #table raised an error ("the number of
// columns in the row does not match the table type"). A fourth value —
// the time component again, matching the declared `time` type of "Test" —
// is now supplied.
let
  Quelle = DateTime.FixedLocalNow(), 
  #"Result" = #table (
    type table [
      #"Update TS" = datetime, 
      #"Update Date" = date, 

      #"Update Time" = time, 

      #"Test" = time

    ]
, 

    { { Quelle, DateTime.Date ( Quelle ), DateTime.Time ( Quelle ), DateTime.Time ( Quelle ) } }

  )

in

  #"Result"

34

let
  Quelle = Table.FromRows (
    Json.Document (
      Binary.Decompress (
        Binary.FromText (
          "bdJbCoQwDIXhrQw+D5Kc2ttsRdz/NsbaPJyagC+mfBB+cp4bRMv23VB3Sfv4+Qh+Ivd3T5PsKq/p9X1QHagYqozQDVWH2njOhtqCmqHmUB+LqKG+oGqovxFskQdBFpQn4qkhnYtMpEsINaQOgeoBUT2eGkpUDymqx1NDB4XAEYXgqaE8F5koL6gYyg4VrlfCeu4iULleDeu5i0DjEC0M4S4Cnc4IPTojuItIQvWSRPV4el1/", 
          BinaryEncoding.Base64

        )
, 

        Compression.Deflate

      )

    )
, 

    let
      _t = ( ( type text ) meta [ Serialized.Text = true ] )

    in

      type table [ Jahr = _t, StartSommerzeit = _t, StartWinterzeit = _t ]

  )
, 

  #"Geänderter Typ" = Table.TransformColumnTypes (
    Quelle, 
    {
      { "Jahr", Int64.Type }, 
      { "StartSommerzeit", type datetime }, 

      { "StartWinterzeit", type datetime }

    }

  )

in

  #"Geänderter Typ"

35

( #"Input Value" as text, Like as logical ) as any =>
  let
    Values = {
      { "Aus", "Australia" }, 
      { "Palestin", "Palestine" }, 

      { "Macao", "Macau" }, 

      { "Congo", "Republic of the Congo" }, 

      { "Taiwan", "Taiwan" }, 

      { "Verde", "Cabo Verde" }, 

      { "Hong Kong", "Hong Kong" }, 

      { "Iran", "Iran" }, 

      { "China", "China" }, 

      { "Moldova", "Moldova" }, 

      { "Bahamas", "Bahamas" }, 

      { "Gambia", "Gambia" }, 

      { "UK", "United Kingdom" }, 

      { "US", "United States" }, 

      { "Viet Nam", "Vietnam" }, 

      { "South", "South Korea" },  //asd

      { "Czech", "Czech Republic" }, 

      { "Ship", null },  /* test */

      { "Princess", null },  //test

      { "The", null }

    }
, 

    //test
    SWITCH = List.First (
      List.Select (
        /* test */
        Values, 
        each 
          if
 Like then
            Text.Contains ( #"Input Value", _{0}, Comparer.OrdinalIgnoreCase )

          else

            _{0} = #"Input Value"

      )

    )
{1}?
, 

    Result = if SWITCH = null then #"Input Value" else SWITCH

  in

    Result

36

let
  Fonte = {
    Lines.FromBinary (
      Web.Contents ( "https://www.camara.leg.br/deputados/quem-sao/resultado?pagina=1" ), 
      null, 

      null, 

      65001

    )

  }
, 

  Fonte1 = Fonte{0}, 

  #"Convertido para Tabela" = Table.FromList (
    Fonte1, 
    Splitter.SplitByNothing(), 

    { "pagina" }, 

    null, 

    ExtraValues.Error

  )
, 

  #"Linhas Filtradas" = Table.SelectRows (
    #"Convertido para Tabela", 
    each Text.Contains ( [pagina], "Exibindo resultados de" )

  )
, 

  #"Extrair Numero de itens" = Table.TransformColumns (
    #"Linhas Filtradas", 
    { { "pagina", each Text.BetweenDelimiters ( _, "5 de ", " encontrados" ), type text } }

  )
, 

  #"Gera Lista de Paginas" = Table.TransformColumns (
    #"Extrair Numero de itens", 
    { { "pagina", each { 1 .. ( Number.RoundUp ( Value.FromText ( _ ) / 25, 0 ) ) }, type text } }

  )
, 

  Paginas = Table.FromList (
    #"Gera Lista de Paginas"{0}[pagina], 
    Splitter.SplitByNothing(), 

    { "pagina" }, 

    null, 

    ExtraValues.Error

  )
, 

  #"Tipo Alterado" = Table.TransformColumnTypes ( Paginas, { { "pagina", type text } } ), 

  #"Dados da Camara" = Table.AddColumn (
    #"Tipo Alterado", 
    "Personalizar", 

    each Lines.FromBinary (
      Web.Contents (
        "https://www.camara.leg.br/deputados/quem-sao/resultado", 
        [ Query = [ pagina = [pagina] ] ]

      )
, 

      null, 

      null, 

      65001

    )

  )
, 

  #"Mostra a Pagina" = Table.ExpandListColumn ( #"Dados da Camara", "Personalizar" ), 

  Deputados = Table.AddColumn (
    #"Mostra a Pagina", 
    "deputado", 

    each 
      if
 Text.Contains ( [Personalizar], " <a href=""https://www.camara.leg.br/deputados/" ) then
        [Personalizar]

      else

        null

  )
, 

  Fotos = Table.AddColumn (
    Deputados, 
    "Fotos", 

    each 
      if
 Text.Contains ( [Personalizar], "https://www.camara.leg.br/internet/deputado/bandep/" ) then
        [Personalizar]

      else

        null

  )
, 

  Status = Table.AddColumn (
    Fotos, 
    "Status", 

    each 
      if
 Text.Contains ( [Personalizar], "lista-resultados__info-exercicio atuando" ) then
        [Personalizar]

      else

        null

  )
, 

  #"Preenchido Abaixo" = Table.FillDown ( Status, { "deputado", "Fotos", "Status" } ), 

  #"Colunas Removidas" = Table.RemoveColumns ( #"Preenchido Abaixo", { "pagina", "Personalizar" } ), 

  #"Linhas Filtradas1" = Table.SelectRows (
    #"Colunas Removidas", 
    each [deputado] <> null and [deputado] <> ""

  )
, 

  #"Linhas Filtradas2" = Table.SelectRows (
    #"Linhas Filtradas1", 
    each [Fotos] <> null and [Fotos] <> ""

  )
, 

  #"Duplicatas Removidas" = Table.Distinct ( #"Linhas Filtradas2" ), 

  TiraLixo = Table.SelectRows (
    #"Duplicatas Removidas", 
    each (
      [deputado]
        <>
 " <a href=""https://www.camara.leg.br/deputados/quem-sao"">"

    )

  )
, 

  #"Texto Extraído Entre os Delimitadores1" = Table.TransformColumns (
    TiraLixo, 
    { { "deputado", each Text.BetweenDelimiters ( _, "deputados/", "</a>" ), type text } }

  )
, 

  FotosLink = Table.TransformColumns (
    #"Texto Extraído Entre os Delimitadores1", 
    { { "Fotos", each Text.BetweenDelimiters ( _, """", """" ), type text } }

  )
, 

  StatusAtual = Table.TransformColumns (
    FotosLink, 
    { { "Status", each Text.BetweenDelimiters ( _, ">", "<" ), type text } }

  )
, 

  #"Id e Deputado" = Table.SplitColumn (
    StatusAtual, 
    "deputado", 

    Splitter.SplitTextByEachDelimiter ( { """>" }, QuoteStyle.None, false ), 

    { "ID", "Deputado" }

  )
, 

  #"Duplicatas Removidas1" = Table.Distinct ( #"Id e Deputado", { "ID" } )

in

  #"Duplicatas Removidas1"

37

// Demonstrates open record types.
//
// BUG FIX: the open record type `c` declared the field `k` TWICE (duplicate
// field names in a record type are invalid in M and raise an error when the
// type is evaluated). The second occurrence has been renamed to `l`.
// Only the binding `k = 1` is evaluated and returned.
let
  // Open record type (trailing `...` permits extra fields).
  c = type [
    k = number, 
    h = number, 

    i = number, 

    j = number, 

    l = number, 

    ...

  ]
, 

  // Fully open record type with no required fields.
  d = type [ ...], 
  k = 1 // the query's result

in

  k

38

let
  //halloe
  d = type [ k = number /* asd */,  ... ], 
  k = 1 //test comment

in

  k

39

let
  MyFunc = ( netValue as number /*test*/ ) =>
    let
      /* asd */
      //tests
      MwSt = 0.19,  //tests
      /* testing */
      valueAfterTax /* a */ = netValue * ( 1 + MwSt ) //testasdaaaaaaaaaaaaaaaaaaaaaaa

    in
 //tst
      //iss
      /*asd*/
      //asd
      //asd
      valueAfterTax

in

  //123
  MyFunc //asd
  //test

40

let
  k = Table.SelectRows ( #"Merged Queries", each [Attributes]?[Hidden]? <> true )

in

  k

41

section Section1;
  A = "Hello";

42

// Capitalize each word in the Item column of the Orders table; Text.Proper
// runs once per row of the table.
let
  Orders = Table.FromRecords (
    {
      [ OrderID = 1, CustomerID = 1, Item = "fishing rod", Price = 100.0 ], 
      [ OrderID = 2, CustomerID = 1, Item = "1 lb. worms", Price = 5.0 ], 
      [ OrderID = 3, CustomerID = 2, Item = "fishing net", Price = 25.0 ]
    }
  ), 

  #"Capitalized Each Word" = Table.TransformColumns (
    Orders, 
    { { "Item", each Text.Proper ( _ ) } }
  )

in

  #"Capitalized Each Word"

43

// Comment prior to let statement
// Trivial query: evaluates to the empty string.
let
  Source = ""

in

  Source

44

// Load LiveInternet search-engine statistics (CSV over HTTP) and type the columns.
let
  // Download the CSV: semicolon-delimited, 5 columns, UTF-8 (code page 65001).
  Источник = Csv.Document (
    Web.Contents (
      "https://www.liveinternet.ru/stat/ru/searches.csv?slice=mobile&id=4&id=13&id=5&id=total&period=month&graph=csv"

    )
, 

    [ Delimiter = ";", Columns = 5, Encoding = 65001, QuoteStyle = QuoteStyle.None ]

  )
, 

  // Promote the first CSV row to column headers.
  #"Повышенные заголовки" = Table.PromoteHeaders ( Источник, [ PromoteAllScalars = true ] ), 

  // Assign a date type to the date column and 64-bit integer types to the counters.
  #"Измененный тип" = Table.TransformColumnTypes (
    #"Повышенные заголовки", 
    {
      { "Дата", type date }, 
      { "Google", Int64.Type }, 

      { "Яндекс", Int64.Type }, 

      { "Search.Mail.ru", Int64.Type }, 

      { "всего", Int64.Type }

    }

  )

in

  #"Измененный тип"

45

// Function exp_by_squaring(x, n)
// if n < 0 then return exp_by_squaring(1 / x, -n);
// else if n = 0 then return 1;
// else if n = 1 then return x ;
// else if n is even then return exp_by_squaring(x * x, n / 2);
// else if n is odd then return x * exp_by_squaring(x * x, (n - 1) / 2);
let
  // True when x is evenly divisible by 2.
  isEven = ( x as number ) => Number.Mod ( x, 2 ) = 0,

  // FIX: removed the dead binding `k = if p = 0 then 1 else 0` that was
  // here — it referenced `p`, which is not bound at this let's scope
  // (`p` only exists as a parameter of pow/fastPow), so evaluating it
  // would have raised an error. It was never used by the result.

  // Naive O(p) exponentiation by repeated multiplication. `@pow` is the
  // inclusive identifier reference required for self-recursion inside a
  // let binding (a plain `pow` here would resolve outside this binding).
  pow = ( x as number, p as number ) =>
    if p = 0 then
      1
    else if p < 0 then
      error "negative power not supported"
    else
      x * @pow ( x, p - 1 ),

  // Exponentiation by squaring: O(log p) multiplications.
  // even p: x^p = (x*x)^(p/2); odd p: x^p = x * (x*x)^((p-1)/2).
  fastPow = ( x as number, p as number ) =>
    if p = 0 then
      1
    else if p < 0 then
      error "negative power not supported"
    else if isEven ( p ) then
      @fastPow ( x * x, p / 2 )
    else
      x * @fastPow ( x * x, ( p - 1 ) / 2 )
in
  fastPow ( 2, 8 ) // = 256

46

let
  // Serialize an arbitrary M value to M source text. `depth` is accepted
  // for interface compatibility but is not used by this implementation.
  ValueToText = ( value, optional depth ) =>
    let
      // True when x can be written as a bare identifier: starts with a
      // letter/underscore, contains only letters/digits/underscores, and
      // is not an M keyword. Any failure (e.g. empty text) yields false.
      _canBeIdentifier = ( x ) =>
        let
          keywords  = { "and", "as", "each", "else", "error", "false", "if", "in", "is", "let", "meta", "not", "otherwise", "or", "section", "shared", "then", "true", "try", "type" },
          charAlpha = ( c as number ) => ( c >= 65 and c <= 90 ) or ( c >= 97 and c <= 122 ) or c = 95,
          charDigit = ( c as number ) => c >= 48 and c <= 57
        in
          try
            charAlpha ( Character.ToNumber ( Text.At ( x, 0 ) ) )
              and List.MatchesAll (
                Text.ToList ( x ),
                ( c ) => let num = Character.ToNumber ( c ) in charAlpha ( num ) or charDigit ( num )
              )
              and not List.MatchesAny ( keywords, ( li ) => li = x )
          otherwise false,

      Serialize.Binary = ( x ) => "#binary(" & Serialize ( Binary.ToList ( x ) ) & ") ",

      Serialize.Date = ( x ) =>
        "#date(" & Text.From ( Date.Year ( x ) ) & ", " & Text.From ( Date.Month ( x ) ) & ", " & Text.From ( Date.Day ( x ) ) & ") ",

      Serialize.Datetime = ( x ) =>
        "#datetime(" & Text.From ( Date.Year ( DateTime.Date ( x ) ) ) & ", "
          & Text.From ( Date.Month ( DateTime.Date ( x ) ) ) & ", "
          & Text.From ( Date.Day ( DateTime.Date ( x ) ) ) & ", "
          & Text.From ( Time.Hour ( DateTime.Time ( x ) ) ) & ", "
          & Text.From ( Time.Minute ( DateTime.Time ( x ) ) ) & ", "
          & Text.From ( Time.Second ( DateTime.Time ( x ) ) ) & ") ",

      Serialize.Datetimezone = ( x ) =>
        let
          dtz = DateTimeZone.ToRecord ( x )
        in
          "#datetimezone(" & Text.From ( dtz[Year] ) & ", " & Text.From ( dtz[Month] ) & ", "
            & Text.From ( dtz[Day] ) & ", " & Text.From ( dtz[Hour] ) & ", "
            & Text.From ( dtz[Minute] ) & ", " & Text.From ( dtz[Second] ) & ", "
            & Text.From ( dtz[ZoneHours] ) & ", " & Text.From ( dtz[ZoneMinutes] ) & ") ",

      Serialize.Duration = ( x ) =>
        let
          dur = Duration.ToRecord ( x )
        in
          "#duration(" & Text.From ( dur[Days] ) & ", " & Text.From ( dur[Hours] ) & ", "
            & Text.From ( dur[Minutes] ) & ", " & Text.From ( dur[Seconds] ) & ") ",

      // Function values cannot be round-tripped; emit signature + stub body.
      Serialize.Function = ( x ) =>
        _serialize_function_param_type (
          Type.FunctionParameters ( Value.Type ( x ) ),
          Type.FunctionRequiredParameters ( Value.Type ( x ) )
        ) & " as " & _serialize_function_return_type ( Value.Type ( x ) ) & " => (...) ",

      Serialize.List = ( x ) =>
        "{" & List.Accumulate ( x, "", ( seed, item ) => if seed = "" then Serialize ( item ) else seed & ", " & Serialize ( item ) ) & "} ",

      Serialize.Logical = ( x ) => Text.From ( x ),

      Serialize.Null = ( x ) => "null",

      // The local Text.From deliberately shadows the library function for
      // callers; inside its own definition the (exclusive) reference
      // `Text.From ( i )` still resolves to the outer library function,
      // which formats all finite numbers.
      Serialize.Number = ( x ) =>
        let
          Text.From = ( i as number ) as text =>
            if Number.IsNaN ( i ) then "#nan"
            else if i = Number.PositiveInfinity then "#infinity"
            else if i = Number.NegativeInfinity then "-#infinity"
            else Text.From ( i )
        in
          Text.From ( x ),

      Serialize.Record = ( x ) =>
        "[ "
          & List.Accumulate (
            Record.FieldNames ( x ),
            "",
            ( seed, item ) =>
              ( if seed = "" then Serialize.Identifier ( item ) else seed & ", " & Serialize.Identifier ( item ) )
                & " = "
                & Serialize ( Record.Field ( x, item ) )
          )
          & " ] ",

      Serialize.Table = ( x ) =>
        "#table( type " & _serialize_table_type ( Value.Type ( x ) ) & ", " & Serialize ( Table.ToRows ( x ) ) & ") ",

      Serialize.Text = ( x ) => """" & _serialize_text_content ( x ) & """",

      // Escape control characters, quotes, '#' and non-ASCII as #(...) sequences.
      _serialize_text_content = ( x ) =>
        let
          escapeText = ( n as number ) as text =>
            "#(#)(" & Text.PadStart ( Number.ToText ( n, "X", "en-US" ), 4, "0" ) & ")"
        in
          List.Accumulate (
            List.Transform (
              Text.ToList ( x ),
              ( c ) =>
                let
                  n = Character.ToNumber ( c )
                in
                  if n = 9 then "#(#)(tab)"
                  else if n = 10 then "#(#)(lf)"
                  else if n = 13 then "#(#)(cr)"
                  else if n = 34 then """"""
                  else if n = 35 then "#(#)(#)"
                  else if n < 32 then escapeText ( n )
                  else if n < 127 then Character.FromNumber ( n )
                  else escapeText ( n )
            ),
            "",
            ( s, i ) => s & i
          ),

      Serialize.Identifier = ( x ) =>
        if _canBeIdentifier ( x ) then x else "#""" & _serialize_text_content ( x ) & """",

      Serialize.Time = ( x ) =>
        "#time(" & Text.From ( Time.Hour ( x ) ) & ", " & Text.From ( Time.Minute ( x ) ) & ", " & Text.From ( Time.Second ( x ) ) & ") ",

      Serialize.Type = ( x ) => "type " & _serialize_typename ( x ),

      /* Optional parameter: Is this being used as part of a function signature? */
      _serialize_typename = ( x, optional funtype as logical ) =>
        let
          isFunctionType = ( x as type ) => try if Type.FunctionReturn ( x ) is type then true else false otherwise false,
          isTableType    = ( x as type ) => try if Type.TableSchema ( x ) is table then true else false otherwise false,
          isRecordType   = ( x as type ) => try if Type.ClosedRecord ( x ) is type then true else false otherwise false,
          isListType     = ( x as type ) => try if Type.ListItem ( x ) is type then true else false otherwise false
        in
          if funtype = null and isTableType ( x ) then _serialize_table_type ( x )
          else if funtype = null and isListType ( x ) then "{ " & @_serialize_typename ( Type.ListItem ( x ) ) & " }"
          else if funtype = null and isFunctionType ( x ) then "function " & _serialize_function_type ( x )
          else if funtype = null and isRecordType ( x ) then _serialize_record_type ( x )
          else if x = type any then "any"
          else
            let
              base = Type.NonNullable ( x )
            in
              ( if Type.IsNullable ( x ) then "nullable " else "" )
                & (
                  if base = type anynonnull then "anynonnull"
                  else if base = type binary then "binary"
                  else if base = type date then "date"
                  else if base = type datetime then "datetime"
                  else if base = type datetimezone then "datetimezone"
                  else if base = type duration then "duration"
                  else if base = type logical then "logical"
                  else if base = type none then "none"
                  else if base = type null then "null"
                  else if base = type number then "number"
                  else if base = type text then "text"
                  else if base = type time then "time"
                  else if base = type type then "type"
                  /* Abstract types: */
                  else if base = type function then "function"
                  else if base = type table then "table"
                  else if base = type record then "record"
                  else if base = type list then "list"
                  else "any /*Actually unknown type*/"
                ),

      _serialize_table_type = ( x ) =>
        let
          schema = Type.TableSchema ( x )
        in
          "table "
            & (
              if Table.IsEmpty ( schema ) then ""
              else
                "["
                  & List.Accumulate (
                    List.Transform (
                      Table.ToRecords ( Table.Sort ( schema, "Position" ) ),
                      each Serialize.Identifier ( _[Name] ) & " = " & _[Kind]
                    ),
                    "",
                    ( seed, item ) => ( if seed = "" then item else seed & ", " & item )
                  )
                  & "] "
            ),

      _serialize_record_type = ( x ) =>
        let
          flds = Type.RecordFields ( x )
        in
          if Record.FieldCount ( flds ) = 0 then "record"
          else
            "["
              & List.Accumulate (
                Record.FieldNames ( flds ),
                "",
                ( seed, item ) =>
                  seed
                    & ( if seed <> "" then ", " else "" )
                    & ( Serialize.Identifier ( item ) & "=" & _serialize_typename ( Record.Field ( flds, item )[Type] ) )
              )
              & ( if Type.IsOpenRecord ( x ) then ",..." else "" )
              & "]",

      _serialize_function_type = ( x ) =>
        _serialize_function_param_type ( Type.FunctionParameters ( x ), Type.FunctionRequiredParameters ( x ) )
          & " as "
          & _serialize_function_return_type ( x ),

      // t: record of parameter name -> type; n: number of required
      // parameters. Positions beyond n are rendered with "optional ".
      _serialize_function_param_type = ( t, n ) =>
        let
          funsig = Table.ToRecords (
            Table.TransformColumns (
              Table.AddIndexColumn ( Record.ToTable ( t ), "isOptional", 1 ),
              { "isOptional", ( x ) => x > n }
            )
          )
        in
          "("
            & List.Accumulate (
              funsig,
              "",
              ( seed, item ) =>
                ( if seed = "" then "" else seed & ", " )
                  & ( if item[isOptional] then "optional " else "" )
                  & Serialize.Identifier ( item[Name] )
                  & " as "
                  & _serialize_typename ( item[Value], true )
            )
            & ")",

      _serialize_function_return_type = ( x ) => _serialize_typename ( Type.FunctionReturn ( x ), true ),

      // Dispatch on the value's runtime type; each branch degrades to a
      // "null /*serialize failed*/" marker instead of failing the whole run.
      Serialize = ( x ) as text =>
        if x is binary then try Serialize.Binary ( x ) otherwise "null /*serialize failed*/"
        else if x is date then try Serialize.Date ( x ) otherwise "null /*serialize failed*/"
        else if x is datetime then try Serialize.Datetime ( x ) otherwise "null /*serialize failed*/"
        else if x is datetimezone then try Serialize.Datetimezone ( x ) otherwise "null /*serialize failed*/"
        else if x is duration then try Serialize.Duration ( x ) otherwise "null /*serialize failed*/"
        else if x is function then try Serialize.Function ( x ) otherwise "null /*serialize failed*/"
        else if x is list then try Serialize.List ( x ) otherwise "null /*serialize failed*/"
        else if x is logical then try Serialize.Logical ( x ) otherwise "null /*serialize failed*/"
        else if x is null then try Serialize.Null ( x ) otherwise "null /*serialize failed*/"
        else if x is number then try Serialize.Number ( x ) otherwise "null /*serialize failed*/"
        else if x is record then try Serialize.Record ( x ) otherwise "null /*serialize failed*/"
        else if x is table then try Serialize.Table ( x ) otherwise "null /*serialize failed*/"
        else if x is text then try Serialize.Text ( x ) otherwise "null /*serialize failed*/"
        else if x is time then try Serialize.Time ( x ) otherwise "null /*serialize failed*/"
        else if x is type then try Serialize.Type ( x ) otherwise "null /*serialize failed*/"
        else "[#_unable_to_serialize_#]"
    in
      try Serialize ( value ) otherwise "<serialization failed>"
in
  ValueToText ( "Some value" )

47

let
  // 25 literals concatenated in order; the result is "asd1asd2...asd25".
  k = "asd1" & "asd2" & "asd3" & "asd4" & "asd5"
    & "asd6" & "asd7" & "asd8" & "asd9" & "asd10"
    & "asd11" & "asd12" & "asd13" & "asd14" & "asd15"
    & "asd16" & "asd17" & "asd18" & "asd19" & "asd20"
    & "asd21" & "asd22" & "asd23" & "asd24" & "asd25",
  // Unused binding (the query result is k). Serialize.Identifier and
  // Serialize are not defined in this snippet, but lazy evaluation means
  // this is never forced.
  Serialize.Record = ( x ) =>
    "[ "
      & List.Accumulate (
        Record.FieldNames ( x ),
        "",
        ( seed, item ) =>
          ( if seed = "" then Serialize.Identifier ( item ) else seed & ", " & Serialize.Identifier ( item ) )
            & " = "
            & Serialize ( Record.Field ( x, item ) )
      )
      & " ] "
in
  k

48

let
  // Function type carrying UI documentation metadata (caption, samples,
  // long description, examples) for GQ_FormatTagForTitling.
  Documentation = type function (
    tag as (
      type nullable text
        meta [
          Documentation.FieldCaption = "Tag to Format",
          Documentation.SampleValues = { "wh2_dlc09_title_king" }
        ]
    )
  ) as nullable text
    meta [
      Documentation.Name = "GQ_FormatTagForTitling",
      Documentation.LongDescription = "Transforms a_database_tag into a Pretty Title. "
        & "Will replace underscores with spaces, and then apply Title Casing to every word. ",
      Documentation.Examples = {
        [
          Description = "Formats a tag.",
          Code        = "GQ_FormatTagForTitling(""wh2_dlc09_title_king"")",
          Result      = """Wh2 Dlc09 Title King"""
        ],
        [
          Description = "Tags in ALL_CAPS will get Title Cased.",
          Code        = "GQ_FormatTagForTitling(""EMPTY_EFFECT"")",
          Result      = """Empty Effect"""
        ]
      }
    ],

  // Replace underscores with spaces and title-case each word; null passes through.
  FormatTagForTitling = ( tag as nullable text ) as nullable text =>
    if tag is null then
      tag
    else
      let
        ReplacedUnderscores = Text.Replace ( tag, "_", " " ),
        Capitalized         = Text.Proper ( ReplacedUnderscores ),
        Returned            = Capitalized
      in
        Returned,

  // Attach the documentation metadata to the implementation.
  DocumentedCall = Value.ReplaceType ( FormatTagForTitling, Documentation )
in
  DocumentedCall

49

let
  // Underscores -> spaces, then Title Case each word; null passes through.
  // (Intermediates of the original are inlined; result is identical.)
  FormatTagForTitling = ( tag as nullable text ) as nullable text =>
    if tag is null then tag else Text.Proper ( Text.Replace ( tag, "_", " " ) ),

  DocumentedCall = FormatTagForTitling
in
  DocumentedCall

50

let
  // One long documentation string describing which HTML tags render in
  // Power Query function documentation. Literals are unchanged and
  // concatenated in the original order.
  Source = "Start the description with an action verb and a 1-sentence description of what the function does. "
    & "<p>"
    & "Some HTML tags can be used. <br>Notably, these work: "
    & "<ul>"
    & "<li> <b>bold</b>"
    & "<li> <i>italics</i>"
    & "<li> <code>code</code>"
    & "</ul>"
    & "These do not work:"
    & "<ul>"
    & "<li> "
    & "<dl>"
    & " <dt>Definition List</dt>"
    & " <dd>A list of terms to define.</dd>"
    & ""
    & " <dt>Definition Term</dt>"
    & " <dd>Definition Definition.</dd>"
    & "</dl>"
    & "<li> <u>underlined</u>"
    & "<li> <strong>strong</strong>"
    & "<li> <em>emphasis</em>"
    & "<li> <mark>mark</mark>"
    & "<li> <small>small</small>"
    & "<li> <del>deleted</del>"
    & "<li> <ins>inserted</ins>"
    & "<li> <sub>subscript</sub>"
    & "<li> <sup>superscript</sup>"
    & "</ul>"
    & "<ol> <li>Ordered list</li></ol>"
    & "<br>"
    & "<h1>Headings</h1>"
    & "<br>"
    & "<hr>Horizontal rule</hr>"
    & "<br>"
    & "<q>Short quote</q>"
    & "<br>"
    & "<blockquote cite=""https://www.google.com"">Blockquote</blockquote>"
    & "<p>"
    & "Tables work, but not their headers:"
    & "<table>"
    & " <tr>"
    & " <th>Game</th>"
    & " <th>Year of Release</th>"
    & " </tr>"
    & " <tr>"
    & " <td>Three Kingdoms</td>"
    & " <td>2019</td>"
    & " </tr>"
    & " <tr>"
    & " <td>Thrones of Britannia</td>"
    & " <td>2019</td>"
    & " </tr>"
    & "</table>"
    & "<p>"
    & "Neither do tbody and thead:"
    & "<table>"
    & " <thead>"
    & " <tr>"
    & " <th>Game</th>"
    & " <th>Year of Release</th>"
    & " </tr>"
    & " </thead>"
    & " <tbody>"
    & " <tr>"
    & " <td>Three Kingdoms</td>"
    & " <td>2019</td>"
    & " </tr>"
    & " <tr>"
    & " <td>Thrones of Britannia</td>"
    & " <td>2019</td>"
    & " </tr>"
    & " </tbody>"
    & "</table>"
in
  Source

51

let
  // Single pre-formatted literal result; the text is unchanged.
  Result = "[Key] [Name] [Differentiated] wh2_main_lothern Lothern Lothern wh2_dlc09_tomb_kings Tomb Kings Tomb Kings wh2_dlc09_tomb_kings_qb1 Tomb Kings Tomb Kings (Qb1) wh2_dlc09_tomb_kings_qb2 Tomb Kings Tomb Kings (Qb2)"
in
  Result

52

section HelloWorld;
  // Connector entry point: returns the given message, or a default greeting.
  [ DataSource.Kind = "HelloWorld", Publish = "HelloWorld.Publish" ]
  shared HelloWorld.Contents = ( optional message as text ) =>
    let
      message = if ( message <> null ) then message else "Hello world"
    in
      message;

  // Data source kind definition: implicit (anonymous) auth only.
  HelloWorld = [
    Authentication = [ Implicit = [] ],
    Label          = Extension.LoadString ( "DataSourceLabel" )
  ];

  // UI publishing metadata for the Get Data dialog.
  HelloWorld.Publish = [
    Beta            = true,
    ButtonText      = { Extension.LoadString ( "FormulaTitle" ), Extension.LoadString ( "FormulaHelp" ) },
    SourceImage     = HelloWorld.Icons,
    SourceTypeImage = HelloWorld.Icons
  ];

  // Icon sets at the sizes the host requests.
  HelloWorld.Icons = [
    Icon16 = {
      Extension.Contents ( "HelloWorld16.png" ),
      Extension.Contents ( "HelloWorld20.png" ),
      Extension.Contents ( "HelloWorld24.png" ),
      Extension.Contents ( "HelloWorld32.png" )
    },
    Icon32 = {
      Extension.Contents ( "HelloWorld32.png" ),
      Extension.Contents ( "HelloWorld40.png" ),
      Extension.Contents ( "HelloWorld48.png" ),
      Extension.Contents ( "HelloWorld64.png" )
    }
  ];

53

let
  // Null-coalescing operator: null ?? 10 evaluates to 10.
  k = null ?? 10

in

  k

54

let
   = 1

in

  

AnnotationsSample.pq

section TripPinAnnotations;
  // Data Source Kind description
  TripPin = [
    // TestConnection is required to enable the connector through the Gateway
    TestConnection = ( dataSourcePath ) => { "TripPin.Annotations" }, 
    Authentication = [ Anonymous = [] ], 

    Label = "AnnotationsSample"

  ]
;

  // Data Source UI publishing description
  TripPin.Publish = [
    Beta       = true, 
    Category   = "Other", 

    ButtonText = { "AnnotationsSample", "AnnotationsSample" }

  ]
;

  [ DataSource.Kind = "TripPin", Publish = "TripPin.Publish" ]
  shared
 TripPin.Annotations = () =>
    let
      serviceDocument = TripPin.ServiceDocument()

    in

      #table (
        { "Location", "Data" }, 
        {
          { "Entity Container Annotations", GetEntityContainerAnnotations ( serviceDocument ) }, 
          {
            "Resources", 
            Table.TransformColumns (
              serviceDocument, 
              {
                {
                  "Data", 
                  ( resource ) =>
                    if resource is function then
                      #table (
                        { "Location", "Data" }, 
                        {
                          {
                            "Function Import Annotations", 
                            GetFunctionImportAnnotations ( resource )

                          }
, 

                          {
                            "Parameter Annotations", 
                            Record.ToTable (
                              GetFunctionParameterAnnotations ( Value.Type ( resource ) )

                            )

                          }

                        }

                      )

                    else

                      #table (
                        { "Location", "Data" }, 
                        {
                          {
                            "Entity Set or Singleton", 
                            GetEntitySetOrSingletonAnnotations ( resource )

                          }
, 

                          {
                            "Entity Type", 
                            #table (
                              { "Location", "Data" }, 
                              {
                                { "Entity Type Annotations", GetEntityTypeAnnotations ( resource ) }, 
                                {
                                  "Property Annotations", 
                                  Record.ToTable ( GetEntityTypePropertyAnnotations ( resource ) )

                                }
, 

                                {
                                  "Function Annotations", 
                                  Table.AddColumn (
                                    Table.FromColumns (
                                      Table.ColumnsOfType ( resource, { Function.Type } ), 
                                      { "Name" }

                                    )
, 

                                    "Value", 

                                    each GetEntityTypeFunctionAnnotations ( resource, [Name] )

                                  )

                                }

                              }

                            )

                          }

                        }

                      )

                }

              }

            )

          }

        }

      )
;

  GetEntityContainerAnnotations = ( serviceDocument ) =>
    Value.Metadata ( Value.Type ( serviceDocument ) )
;

  GetEntitySetOrSingletonAnnotations = ( entitySetOrSingleton ) =>
    Value.Metadata ( entitySetOrSingleton )
;

  GetFunctionImportAnnotations = ( functionImport ) =>
    Value.Metadata ( Value.Type ( functionImport ) )
;

  GetEntityTypeAnnotations = ( entitySetOrSingleton ) =>
    let
      entityCollectionType  = Value.Type ( entitySetOrSingleton ), 
      entityType            = Type.TableRow ( entityCollectionType ), 

      entityTypeAnnotations = Value.Metadata ( entityType )[OData.Annotations]

    in

      entityTypeAnnotations
;

  GetEntityTypePropertyAnnotations = ( entitySetOrSingleton ) =>
    let
      entityCollectionType = Value.Type ( entitySetOrSingleton ), 
      entityType = 
        if
 entitySetOrSingleton is record then
          entityCollectionType

        else

          Type.TableRow ( entityCollectionType )
, 

      fieldAnnotations = Value.Metadata ( entityType )[OData.FieldAnnotations]

    in

      fieldAnnotations
;

  GetEntityTypeFunctionAnnotations = ( entitySetOrSingleton, functionName ) =>
    let
      entityCollectionType = Value.Type ( entitySetOrSingleton ), 
      entityType = 
        if
 entitySetOrSingleton is record then
          entityCollectionType

        else

          Type.TableRow ( entityCollectionType )
, 

      functionType = Type.TableColumn ( entityType, functionName ), 

      functionAnnotations = Value.Metadata ( functionType )

    in

      functionAnnotations
;

  GetFunctionParameterAnnotations = ( functionType ) =>
    let
      parameters = Type.FunctionParameters ( functionType ), 
      parametersTable = Record.ToTable ( parameters ), 

      parameterAnnotations = Table.TransformColumns (
        parametersTable, 
        { { "Value", Value.Metadata } }

      )

    in

      Record.FromTable ( parameterAnnotations )
;

  BaseUrl = "http://services.odata.org/v4/TripPinService/";

  // Without explicitly setting IncludeAnnotations or IncludeMetadataAnnotations,
  // annotations will not be made available
  TripPin.ServiceDocument = () =>
    OData.Feed ( BaseUrl, null, [ Implementation = "2.0", IncludeAnnotations = "*" ] ) as table
;

AnnotationsSample.query.pq

// Use this file to write queries to test your data connector
// Evaluates the connector's shared entry point and returns its table.
let
  result = TripPin.Annotations()

in

  result

DataWorldSwagger.pq

// This file contains your Data Connector logic
section DataWorldSwagger;
  // TODO: add your client id and secret to the embedded files
  client_id = Text.FromBinary ( Extension.Contents ( "client_id" ) )
;

  client_secret = Text.FromBinary ( Extension.Contents ( "client_secret" ) );

  redirect_uri = "https://oauth.powerbi.com/views/oauthredirect.html";

  windowWidth = 800;

  windowHeight = 800;

  BaseUrl = "https://api.data.world/v0";

  OAuthBaseUrl = "https://data.world/oauth";

  [ DataSource.Kind = "DataWorldSwagger", Publish = "DataWorldSwagger.Publish" ]
  shared
 DataWorldSwagger.Contents = () =>
    let
      credential = Extension.CurrentCredential(), 
      token = 
        if
 ( credential[AuthenticationKind] = "Key" ) then
          credential[Key]

        else

          credential[access_token]
, 

      headers = [ Authorization = "Bearer " & token ], 

      navTable = OpenApi.Document (
        Web.Contents ( "https://api.data.world/v0/swagger.json" ), 
        [ Headers = headers, ManualCredentials = true ]

      )

    in

      navTable
;

  // Data Source Kind description
  DataWorldSwagger = [
    // enable both OAuth and Key based auth
    Authentication = [
      OAuth = [ StartLogin = StartLogin, FinishLogin = FinishLogin, Refresh = Refresh ], 
      Key   = []

    ]
, 

    Label = Extension.LoadString ( "DataSourceLabel" )

  ]
;

  // Data Source UI publishing description
  DataWorldSwagger.Publish = [
    Beta         = true, 
    Category     = "Other", 

    ButtonText   = { Extension.LoadString ( "ButtonTitle" ), Extension.LoadString ( "ButtonHelp" ) }, 

    LearnMoreUrl = "https://data.world/"

  ]
;

  //
  // OAuth2 flow definition
  //
  StartLogin = ( resourceUrl, state, display ) =>
    let
      AuthorizeUrl = OAuthBaseUrl
        &
 "/authorize?"

        &
 Uri.BuildQueryString (
          [
            client_id     = client_id, 
            response_type = "code", 

            state         = state, 

            redirect_uri  = redirect_uri

          ]

        )

    in

      [
        LoginUri     = AuthorizeUrl, 
        CallbackUri  = redirect_uri, 

        WindowHeight = windowHeight, 

        WindowWidth  = windowWidth, 

        Context      = null

      ]
;

  FinishLogin = ( context, callbackUri, state ) =>
    let
      Parts = Uri.Parts ( callbackUri )[Query]

    in

      TokenMethod ( Parts[code], "authorization_code" )
;

  TokenMethod = ( code, grant_type ) =>
    let
      Response = Web.Contents (
        OAuthBaseUrl & "/access_token", 
        [
          Content = Text.ToBinary (
            Uri.BuildQueryString (
              [
                client_id     = client_id, 
                client_secret = client_secret, 

                code          = code, 

                grant_type    = grant_type, 

                redirect_uri  = redirect_uri

              ]

            )

          )
, 

          Headers = [
            #"Content-type" = "application/x-www-form-urlencoded", 
            #"Accept"       = "application/json"

          ]

        ]

      )
, 

      Parts = Json.Document ( Response )

    in

      Parts
;

  Refresh = ( resourceUrl, refresh_token ) => TokenMethod ( refresh_token, "refresh_token" );

DataWorldSwagger.query.pq

// Use this file to write queries to test your data connector
let
  Source = DataWorldSwagger.Contents()
in
  Source

DirectQueryForSQL.pq

section DirectSQL;
  [ DataSource.Kind = "DirectSQL", Publish = "DirectSQL.UI" ]
  shared
 DirectSQL.Database = ( server as text, database as text ) as table =>
    let
      // This record contains all of the connection string properties we
      // will set for this ODBC driver. The 'Driver' field is required for
      // all ODBC connections. Other properties will vary between ODBC drivers,
      // but generally take Server and Database properties. Note that
      // credential related properties will be set separately.
      ConnectionString = [
        Driver   = "SQL Server Native Client 11.0", 
        Server   = server, 

        Database = database

      ]
, 

      // Get the current credential, and check what type of authentication we're using
      Credential = Extension.CurrentCredential(), 
      // Credentials are passed to the ODBC driver using the CredentialConnectionString field.
      // If the user has selected SQL auth (i.e. UsernamePassword), we'll set the
      // UID and PWD connection string properties. This should be standard across ODBC drivers.
      // If the user has selected Windows auth, we'll set the Trusted_Connection property.
      // Trusted_Connection is specific to the SQL Server Native Client ODBC driver.
      // Other drivers might require additional connection string properties to be set.
      CredentialConnectionString = 
        if
 ( Credential[AuthenticationKind]? ) = "UsernamePassword" then
          [ UID = Credential[Username], PWD = Credential[Password] ]

        else
 if ( Credential[AuthenticationKind]? ) = "Windows" then
          [ Trusted_Connection = "Yes" ]

        // unknown authentication kind - return an 'unimplemented' error
        else
          ...
, 

      // Here our connector is wrapping M's Odbc.DataSource() function.
      //
      // The first argument will be the connection string. It can be passed in as a record,
      // or an actual text value. When using a record, M will ensure that the values will be
      // property encoded.
      //
      // The second argument is the options record which allows us to set the credential
      // connection string properties, and override default behaviors.
      OdbcDataSource = Odbc.DataSource (
        ConnectionString, 
        [
          // Pass the credential-specific part of the connection string
          CredentialConnectionString = CredentialConnectionString, 
          // Enables client side connection pooling for the ODBC driver.
          // Most drivers will want to set this value to true.
          ClientConnectionPooling = true, 
          // When HierarchialNavigation is set to true, the navigation tree
          // will be organized by Database -> Schema -> Table. When set to false,
          // all tables will be displayed in a flat list using fully qualified names.
          HierarchicalNavigation = true, 
          // Use the SqlCapabilities record to specify driver capabilities that are not
          // discoverable through ODBC 3.8, and to override capabilities reported by
          // the driver.
          SqlCapabilities = [
            SupportsTop            = true, 
            Sql92Conformance       = 8 /* SQL_SC_SQL92_FULL */, 

            GroupByCapabilities    = 4 /* SQL_GB_NO_RELATION */, 

            FractionalSecondsScale = 3

          ]
, 

          SoftNumbers = true, 

          HideNativeQuery = true, 

          // Use the SQLGetInfo record to override values returned by the driver.
          SQLGetInfo = [ SQL_SQL92_PREDICATES = 0x0000FFFF, SQL_AGGREGATE_FUNCTIONS = 0xFF ]

        ]

      )
, 

      // The first level of the navigation table will be the name of the database the user
      // passed in. Rather than repeating it again, we'll select it ({[Name = database]})
      // and access the next level of the navigation table.
      Database = OdbcDataSource{[ Name = database ]}[Data]

    in

      Database
;

  // Data Source definition
  DirectSQL = [
    TestConnection = ( dataSourcePath ) =>
      let
        json     = Json.Document ( dataSourcePath ), 
        server   = json[server], 

        database = json[database]

      in

        { "DirectSQL.Database", server, database }
, 

    Authentication = [ Windows = [], UsernamePassword = [] ], 

    Label = "Direct Query for SQL", 

    // This sample doesn't enable the use of SSL.
    SupportsEncryption = false

  ]
;

  // UI Export definition
  DirectSQL.UI = [
    SupportsDirectQuery = true,  // enables direct query
    Category            = "Database", 

    ButtonText          = { "Direct Query for SQL", "Direct Query via ODBC sample for SQL Server" }, 

    SourceImage         = DirectSQL.Icons, 

    SourceTypeImage     = DirectSQL.Icons

  ]
;

  DirectSQL.Icons = [
    Icon16 = {
      Extension.Contents ( "DirectQueryForSQL16.png" ), 
      Extension.Contents ( "DirectQueryForSQL20.png" ), 

      Extension.Contents ( "DirectQueryForSQL24.png" ), 

      Extension.Contents ( "DirectQueryForSQL32.png" )

    }
, 

    Icon32 = {
      Extension.Contents ( "DirectQueryForSQL32.png" ), 
      Extension.Contents ( "DirectQueryForSQL40.png" ), 

      Extension.Contents ( "DirectQueryForSQL48.png" ), 

      Extension.Contents ( "DirectQueryForSQL64.png" )

    }

  ]
;

DirectQueryForSQL.query.pq

// Use this file to write queries.
let
  serverName = "localhost",
  databaseName = "AdventureWorksDW2012",
  navigation = DirectSQL.Database(serverName, databaseName),
  dboSchema = navigation{[ Name = "dbo", Kind = "Schema" ]}[Data]
in
  dboSchema

github.pq

section GithubSample;
  //
  // OAuth configuration settings
  //
  // You MUST replace the values below for values for your own application.
  // Sign in to GitHub and navigate to https://github.com/settings/applications/new.
  // Follow the steps and obtain your client_id and client_secret.
  // Set your Redirect URI value in your application registration to match the value below.
  // Update the values within the "client_id" and "client_secret" files in the project.
  //
  // Note: due to incompatibilities with the Internet Explorer control used in Visual Studio,
  // you will not be able to authorize a new GitHub application during the OAuth flow. You can work around
  // this by loading your extension in Power BI Desktop, and completing the OAuth flow there.
  // Once the application has been authorized for a given user, then the OAuth flow will work when
  // run in Visual Studio.
  // OAuth application credentials, read from files packaged with the extension
  // (see the setup comments above).
  client_id = Text.FromBinary ( Extension.Contents ( "client_id" ) )
;

  client_secret = Text.FromBinary ( Extension.Contents ( "client_secret" ) );

  // Must match the Redirect URI registered with the GitHub OAuth application.
  redirect_uri = "https://oauth.powerbi.com/views/oauthredirect.html";

  // Dimensions (pixels) of the hosted OAuth sign-in dialog.
  windowWidth = 1200;

  windowHeight = 1000;

  //
  // Exported functions
  //
  // These functions are exported to the M Engine (making them visible to end users), and associates
  // them with the specified Data Source Kind. The Data Source Kind is used when determining which
  // credentials to use during evaluation. Credential matching is done based on the function's parameters.
  // All data source functions associated to the same Data Source Kind must have a matching set of required
  // function parameters, including type, name, and the order in which they appear.
  [ DataSource.Kind = "GithubSample", Publish = "GithubSample.UI" ]
  shared
 GithubSample.Contents = Value.ReplaceType (
    Github.Contents, 
    type function ( url as Uri.Type ) as any

  )
;

  [ DataSource.Kind = "GithubSample" ]
  shared
 GithubSample.PagedTable = Value.ReplaceType (
    Github.PagedTable, 
    type function ( url as Uri.Type ) as nullable table

  )
;

  //
  // Data Source definition
  //
  GithubSample = [
    TestConnection = ( dataSourcePath ) => { "GithubSample.Contents", dataSourcePath }, 
    Authentication = [
      OAuth = [
        StartLogin  = StartLogin, 
        FinishLogin = FinishLogin, 

        Label       = Extension.LoadString ( "AuthenticationLabel" )

      ]

    ]

  ]
;

  //
  // UI Export definition
  //
  GithubSample.UI = [
    Beta = true, 
    ButtonText = { Extension.LoadString ( "FormulaTitle" ), Extension.LoadString ( "FormulaHelp" ) }, 

    SourceImage = GithubSample.Icons, 

    SourceTypeImage = GithubSample.Icons

  ]
;

  GithubSample.Icons = [
    Icon16 = {
      Extension.Contents ( "github16.png" ), 
      Extension.Contents ( "github20.png" ), 

      Extension.Contents ( "github24.png" ), 

      Extension.Contents ( "github32.png" )

    }
, 

    Icon32 = {
      Extension.Contents ( "github32.png" ), 
      Extension.Contents ( "github40.png" ), 

      Extension.Contents ( "github48.png" ), 

      Extension.Contents ( "github64.png" )

    }

  ]
;

  //
  // Github.Contents - retrieves a single page of data from github and sets a
  // Next link value as meta on the returned json response. We parse the json
  // result (which will be a list of records) into a table.
  //
  Github.Contents = ( url as text ) =>
    let
      content = Web.Contents ( url ), 
      link    = GetNextLink ( content ), 

      json    = Json.Document ( content ), 

      table   = Table.FromList ( json, Splitter.SplitByNothing() )

    in

      table meta [ Next = link ]
;

  Github.PagedTable = ( url as text ) =>
    Table.GenerateByPage (
      ( previous ) =>
        let
          // If we have a previous page, get its Next link from metadata on the page.
          next = if ( previous <> null ) then Value.Metadata ( previous )[Next] else null, 
          // If we have a next link, use it, otherwise use the original URL that was passed in.
          urlToUse = if ( next <> null ) then next else url, 
          // If we have a previous page, but don't have a next link, then we're done paging.
          // Otherwise retrieve the next page.
          current = 
            if
 ( previous <> null and next = null ) then
              null

            else

              Github.Contents ( urlToUse )
, 

          // If we got data back from the current page, get the link for the next page
          link = if ( current <> null ) then Value.Metadata ( current )[Next] else null

        in

          current meta [ Next = link ]

    )
;

  // This function returns an absolute URL to the next page of data.
  //
  // The 'response' parameter typically contains the result of the call to Web.Contents.
  // The 'request' parameter is optional and contains values to formulate the request.
  // It is typically used when the next link is a relative URL and needs to be
  // appended to a base URL from the request. Its format is up to the extension author.
  //
  // The current implementation is specific to Github, which returns its next link
  // in a "Link" header in the response. The 'request' parameter is not used.
  // You will most likely need to replace the logic below with whatever paging
  // mechanism is used by your data source.
  //
  GetNextLink = ( response, optional request ) =>
    let
      // extract the "Link" header if it exists
      link = Value.Metadata ( response )[Headers][#"Link"]?, 
      links = Text.Split ( link, "," ), 

      splitLinks = List.Transform ( links, each Text.Split ( Text.Trim ( _ ), ";" ) ), 

      next = List.Select ( splitLinks, each Text.Trim ( _{1} ) = "rel=""next""" ), 

      first = List.First ( next ), 

      removedBrackets = Text.Range ( first{0}, 1, Text.Length ( first{0} ) - 2 )

    in

      try removedBrackets otherwise null
;

  //
  // OAuth2 flow definition
  //
  StartLogin = ( resourceUrl, state, display ) =>
    let
      AuthorizeUrl = "https://github.com/login/oauth/authorize?"
        &
 Uri.BuildQueryString (
          [
            client_id    = client_id, 
            scope        = "user, repo", 

            state        = state, 

            redirect_uri = redirect_uri

          ]

        )

    in

      [
        LoginUri     = AuthorizeUrl, 
        CallbackUri  = redirect_uri, 

        WindowHeight = windowHeight, 

        WindowWidth  = windowWidth, 

        Context      = null

      ]
;

  FinishLogin = ( context, callbackUri, state ) =>
    let
      Parts = Uri.Parts ( callbackUri )[Query]

    in

      TokenMethod ( Parts[code] )
;

  TokenMethod = ( code ) =>
    let
      Response = Web.Contents (
        "https://github.com/login/oauth/access_token", 
        [
          Content = Text.ToBinary (
            Uri.BuildQueryString (
              [
                client_id     = client_id, 
                client_secret = client_secret, 

                code          = code, 

                redirect_uri  = redirect_uri

              ]

            )

          )
, 

          Headers = [
            #"Content-type" = "application/x-www-form-urlencoded", 
            #"Accept"       = "application/json"

          ]

        ]

      )
, 

      Parts = Json.Document ( Response )

    in

      Parts
;

  //
  // Common code
  //
  // Calls the getNextPage function until it returns null.
  // Each call to getNextPage expects a table of data to be returned.
  // The result of the previous call to getNextPage is passed along to the next call.
  // Appends all pages (tables) together into a single result.
  // Returns an empty table if the first call to getNextPage returns null.
  Table.GenerateByPage = ( getNextPage as function ) as table =>
    let
      listOfPages = List.Generate (
        () => getNextPage ( null ), 
        ( lastPage ) => lastPage <> null, 

        ( lastPage ) => getNextPage ( lastPage )

      )
, 

      tableOfPages = Table.FromList ( listOfPages, Splitter.SplitByNothing(), { "Column1" } ), 

      firstRow = tableOfPages{0}?

    in

      if ( firstRow = null ) then
        Table.FromRows ( {} )

      else

        Value.ReplaceType (
          Table.ExpandTableColumn (
            tableOfPages, 
            "Column1", 

            Table.ColumnNames ( firstRow[Column1] )

          )
, 

          Value.Type ( firstRow[Column1] )

        )
;

github.query.pq

// This is where you can write sample queries to test your extension.
let
  Result = GithubSample.Contents("https://api.github.com/")
in
  Result

HelloWorld.pq

section HelloWorld;
  [ DataSource.Kind = "HelloWorld", Publish = "HelloWorld.Publish" ]
  shared
 HelloWorld.Contents = ( optional message as text ) =>
    let
      message = if ( message <> null ) then message else "Hello world"

    in

      message
;

  HelloWorld = [
    TestConnection = ( dataSourcePath ) => { "HelloWorld.Contents" }, 
    Authentication = [ Anonymous = [] ], 

    Label          = Extension.LoadString ( "DataSourceLabel" )

  ]
;

  HelloWorld.Publish = [
    Beta = true, 
    ButtonText = { Extension.LoadString ( "FormulaTitle" ), Extension.LoadString ( "FormulaHelp" ) }, 

    SourceImage = HelloWorld.Icons, 

    SourceTypeImage = HelloWorld.Icons

  ]
;

  HelloWorld.Icons = [
    Icon16 = {
      Extension.Contents ( "HelloWorld16.png" ), 
      Extension.Contents ( "HelloWorld20.png" ), 

      Extension.Contents ( "HelloWorld24.png" ), 

      Extension.Contents ( "HelloWorld32.png" )

    }
, 

    Icon32 = {
      Extension.Contents ( "HelloWorld32.png" ), 
      Extension.Contents ( "HelloWorld40.png" ), 

      Extension.Contents ( "HelloWorld48.png" ), 

      Extension.Contents ( "HelloWorld64.png" )

    }

  ]
;

HelloWorld.query.pq

// Use this file to write queries.
// Example invocation of the connector's exported function.
HelloWorld.Contents ( "hello this is my message" )

HelloWorldWithDocs.pq

section HelloWorldWithDocs;
  // Exposed function: HelloWorldImpl with rich function/parameter
  // documentation attached via Value.ReplaceType (see HelloWorldType).
  [ DataSource.Kind = "HelloWorldWithDocs", Publish = "HelloWorldWithDocs.Publish" ]
  shared
 HelloWorldWithDocs.Contents = Value.ReplaceType ( HelloWorldImpl, HelloWorldType );

  HelloWorldType = type function (
    message as (
      type text
        meta
 [
          Documentation.FieldCaption     = "Message", 
          Documentation.FieldDescription = "Text to display", 

          Documentation.SampleValues     = { "Hello world", "Hola mundo" }

        ]

    )
, 

    optional count as (
      type number
        meta
 [
          Documentation.FieldCaption     = "Count", 
          Documentation.FieldDescription = "Number of times to repeat the message", 

          Documentation.AllowedValues    = { 1, 2, 3 }

        ]

    )

  )
 as table

    meta
 [
      Documentation.Name = "Hello - Name", 
      Documentation.LongDescription = "Hello - Long Description", 

      Documentation.Examples = {
        [
          Description = "Returns a table with 'Hello world' repeated 2 times", 
          Code        = "HelloWorldWithDocs.Contents(""Hello world"", 2)", 

          Result      = "#table({""Column1""}, {{""Hello world""}, {""Hello world""}})"

        ]
, 

        [
          Description = "Another example, new message, new count!", 
          Code        = "HelloWorldWithDocs.Contents(""Goodbye"", 1)", 

          Result      = "#table({""Column1""}, {{""Goodbye""}})"

        ]

      }

    ]
;

  HelloWorldImpl = ( message as text, optional count as number ) as table =>
    let
      _count         = if ( count <> null ) then count else 5, 
      listOfMessages = List.Repeat ( { message }, _count ), 

      table          = Table.FromList ( listOfMessages, Splitter.SplitByNothing() )

    in

      table
;

  // Data Source Kind description
  HelloWorldWithDocs = [
    Authentication = [ Anonymous = [] ] //,

  //Label = "Hello World With Docs"
  ]
;

  // Data Source UI publishing description
  HelloWorldWithDocs.Publish = [
    Beta = true, 
    Category = "Other", 

    ButtonText = {
      "Hello World With Docs", 
      "Provides an example of how to provide function documentation"

    }

  ]
;

HelloWorldWithDocs.query.pq

// Use this file to write queries to test your data connector
let
  Sample = HelloWorldWithDocs.Contents("Hello world", 2)
in
  Sample

HiveSample.pq

section HiveSample;
  // When set to true, additional trace information will be written out to the User log.
  // This should be set to false before release. Tracing is done through a call to
  // Diagnostics.LogValue(). When EnableTraceOutput is set to false, the call becomes a
  // no-op and simply returns the original value.
  EnableTraceOutput = false
;

  /****************************
* ODBC Driver Configuration
****************************/
  // Name of the ODBC driver to load. Alternative:
  // Config_DriverName = "Hortonworks Hive ODBC Driver";
  Config_DriverName = "Microsoft Hive ODBC Driver"
;

  // SQL-92 conformance level to report (null, 1, 2, 4, 8).
  Config_SqlConformance = SQL_SC[SQL_SC_SQL92_FULL]
;

  // true, false
  Config_DefaultUsernamePasswordHandling = true
;

  // true, false, null — whether the driver supports ODBC parameter bindings
  // (see BuildOdbcConfig below).
  Config_UseParameterBindings = false
;

  // Escape characters for string literals, ex. { "\" }.
  Config_StringLiterateEscapeCharacters = { "\" }
;

  // true, false, null — emit CAST instead of CONVERT in generated SQL.
  Config_UseCastInsteadOfConvert = null
;

  // true, false — whether the driver supports TOP.
  Config_SupportsTop = false
;

  // true, false
  // Set this to true to enable Direct Query in addition to Import mode.
  //
  // *** IMPORTANT ***: Direct Query will only provide a decent user experience
  // if you are connecting to a Hive LLAP instance.
  //
  Config_EnableDirectQuery = true
;
  [ DataSource.Kind = "HiveSample", Publish = "HiveSample.Publish" ]
  shared
 HiveSample.Contents = ( host as text, port as number ) =>
    let
      //
      // Connection string settings
      //
      ConnectionString = [
        Driver = Config_DriverName, 
        // set all connection string properties
        host = host, 
        port = port, 

        authmech = 3,  // UsernamePassword

        thrifttransport = 1 // SASL

      ]
, 

      //
      // Handle credentials
      // Credentials are not persisted with the query and are set through a separate
      // record field - CredentialConnectionString. The base Odbc.DataSource function
      // will handle UsernamePassword authentication automatically, but it is explictly
      // handled here as an example.
      //
      Credential = Extension.CurrentCredential(), 
      CredentialConnectionString = 
        if
 Credential[AuthenticationKind]? = "UsernamePassword" then
          // set connection string parameters used for basic authentication
          [ UID = Credential[Username], PWD = Credential[Password] ]
        else

          error Error.Record (
            "Error", 
            "Unhandled authentication kind: " & Credential[AuthenticationKind]?

          )
, 

      //
      // Configuration options for the call to Odbc.DataSource
      //
      defaultConfig = BuildOdbcConfig(), 
      SqlCapabilities = defaultConfig[SqlCapabilities]
        &
 [
          // place custom overrides here
          GroupByCapabilities = SQL_GB[SQL_GB_COLLATE]

        ]
, 

      // Please refer to the ODBC specification for SQLGetInfo properties and values.
      // https://github.com/Microsoft/ODBC-Specification/blob/master/Windows/inc/sqlext.h
      SQLGetInfo = defaultConfig[SQLGetInfo]
        &
 [
          // place custom overrides here
          SQL_STRING_FUNCTIONS = 
            let

              // this is the value reported by the driver: 277753
              driverDefault = {
                SQL_FN_STR[SQL_FN_STR_CONCAT], 
                SQL_FN_STR[SQL_FN_STR_LTRIM], 

                SQL_FN_STR[SQL_FN_STR_LENGTH], 

                SQL_FN_STR[SQL_FN_STR_LOCATE], 

                SQL_FN_STR[SQL_FN_STR_LCASE], 

                SQL_FN_STR[SQL_FN_STR_REPEAT], 

                SQL_FN_STR[SQL_FN_STR_RTRIM], 

                SQL_FN_STR[SQL_FN_STR_SUBSTRING], 

                SQL_FN_STR[SQL_FN_STR_UCASE], 

                SQL_FN_STR[SQL_FN_STR_ASCII], 

                SQL_FN_STR[SQL_FN_STR_SPACE]

              }
, 

              // add missing string functions
              updated = driverDefault
                &
 { SQL_FN_STR[SQL_FN_STR_LEFT], SQL_FN_STR[SQL_FN_STR_RIGHT] }

            in

              Flags ( updated )
, 

          SQL_NUMERIC_FUNCTIONS = 
            let

              // this is the value reported by the driver: 8386415
              driverDefault = {
                SQL_FN_NUM[SQL_FN_NUM_ABS], 
                SQL_FN_NUM[SQL_FN_NUM_ASIN], 

                SQL_FN_NUM[SQL_FN_NUM_ATAN2], 

                SQL_FN_NUM[SQL_FN_NUM_LOG], 

                SQL_FN_NUM[SQL_FN_NUM_SIN], 

                SQL_FN_NUM[SQL_FN_NUM_SQRT], 

                SQL_FN_NUM[SQL_FN_NUM_LOG10], 

                SQL_FN_NUM[SQL_FN_NUM_POWER], 

                SQL_FN_NUM[SQL_FN_NUM_RADIANS]

              }
, 

              // add missing functions
              updated = driverDefault & { SQL_FN_NUM[SQL_FN_NUM_MOD] }

            in

              Flags ( updated )

        ]
, 

      // SQLGetTypeInfo can be specified in two ways:
      // 1. A #table() value that returns the same type information as an ODBC
      // call to SQLGetTypeInfo.
      // 2. A function that accepts a table argument, and returns a table. The
      // argument will contain the original results of the ODBC call to SQLGetTypeInfo.
      // Your function implementation can modify/add to this table.
      //
      // For details of the format of the types table parameter and expected return value,
      // please see: https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function
      //
      // The sample implementation provided here will simply output the original table
      // to the user trace log, without any modification.
      SQLGetTypeInfo = ( types ) =>
        if ( EnableTraceOutput <> true ) then
          types

        else

          let
            // Outputting the entire table might be too large, and result in the value being truncated.
            // We can output a row at a time instead with Table.TransformRows()
            rows = Table.TransformRows (
              types, 
              each Diagnostics.LogValue ( "SQLGetTypeInfo " & _[TYPE_NAME], _ )

            )
, 

            toTable = Table.FromRecords ( rows )

          in

            Value.ReplaceType ( toTable, Value.Type ( types ) )
, 

      // SQLColumns is a function handler that receives the results of an ODBC call
      // to SQLColumns(). The source parameter contains a table with the data type
      // information. This override is typically used to fix up data type mismatches
      // between calls to SQLGetTypeInfo and SQLColumns.
      //
      // For details of the format of the source table parameter, please see:
      // https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlcolumns-function
      //
      // The sample implementation provided here will simply output the original table
      // to the user trace log, without any modification.
      SQLColumns = ( catalogName, schemaName, tableName, columnName, source ) =>
        if ( EnableTraceOutput <> true ) then
          source

        else
 // the if statement conditions will force the values to evaluated/written to diagnostics
        if (
          Diagnostics.LogValue ( "SQLColumns.TableName", tableName )
            <>
 "***"

            and
 Diagnostics.LogValue ( "SQLColumns.ColumnName", columnName ) <> "***"

        )

        then

          let
            // Outputting the entire table might be too large, and result in the value being truncated.
            // We can output a row at a time instead with Table.TransformRows()
            rows = Table.TransformRows ( source, each Diagnostics.LogValue ( "SQLColumns", _ ) ), 
            toTable = Table.FromRecords ( rows )

          in

            Value.ReplaceType ( toTable, Value.Type ( source ) )

        else

          source
, 

      // Hive format is "LIMIT [<skip>,]<take>" - ex. LIMIT 2,10 or LIMIT 10
      // There's no direct support for the "skip" in this format but "take" is the important one to support.
      LimitClauseKind = LimitClauseKind.Limit, 
      OdbcDatasource = Odbc.DataSource (
        ConnectionString, 
        [
          // A logical (true/false) that sets whether to view the tables grouped by their schema names
          HierarchicalNavigation = true, 
          // Prevents execution of native SQL statements. Extensions should set this to true.
          HideNativeQuery = true, 
          // Enables connection pooling via the system ODBC manager
          ClientConnectionPooling = true, 
          // Show table relationships
          CreateNavigationProperties = true, 
          // These values should be set by previous steps
          CredentialConnectionString = CredentialConnectionString, 
          AstVisitor = AstVisitor, 

          SqlCapabilities = SqlCapabilities, 

          SQLColumns = SQLColumns, 

          SQLGetInfo = SQLGetInfo, 

          SQLGetTypeInfo = SQLGetTypeInfo

        ]

      )

    in

      OdbcDatasource
;

  // Data Source Kind description
  HiveSample = [
    // Set the TestConnection handler to enable gateway support.
    // The TestConnection handler will invoke your data source function to
    // validate the credentials the user has provider. Ideally, this is not
    // an expensive operation to perform. By default, the dataSourcePath value
    // will be a json string containing the required parameters of your data
    // source function. These should be parsed and parsed as individual parameters
    // to the specified data source function.
    TestConnection = ( dataSourcePath ) =>
      let
        json = Json.Document ( dataSourcePath ), 
        host = json[host],  // name of function parameter

        port = json[port] // name of function parameter

      in

        { "HiveSample.Contents", host, port }
, 

    // Set supported types of authentication
    Authentication = [ UsernamePassword = [] ], 
    Label = Extension.LoadString ( "DataSourceLabel" )

  ]
;

  // Data Source UI publishing description
  HiveSample.Publish = [
    Beta = true, 
    Category = "Other", 

    ButtonText = { Extension.LoadString ( "ButtonTitle" ), Extension.LoadString ( "ButtonHelp" ) }, 

    LearnMoreUrl = "https://powerbi.microsoft.com/", 

    SupportsDirectQuery = Config_EnableDirectQuery, 

    SourceImage = HiveSample.Icons, 

    SourceTypeImage = HiveSample.Icons

  ]
;

  HiveSample.Icons = [
    Icon16 = {
      Extension.Contents ( "HiveSample16.png" ), 
      Extension.Contents ( "HiveSample20.png" ), 

      Extension.Contents ( "HiveSample24.png" ), 

      Extension.Contents ( "HiveSample32.png" )

    }
, 

    Icon32 = {
      Extension.Contents ( "HiveSample32.png" ), 
      Extension.Contents ( "HiveSample40.png" ), 

      Extension.Contents ( "HiveSample48.png" ), 

      Extension.Contents ( "HiveSample64.png" )

    }

  ]
;

  // build settings based on configuration variables
  BuildOdbcConfig = () as record =>
    let
      defaultConfig = [ SqlCapabilities = [], SQLGetFunctions = [], SQLGetInfo = [] ], 
      withParams = 
        if
 ( Config_UseParameterBindings = false ) then
          let
            caps = defaultConfig[SqlCapabilities]
              &
 [
                SqlCapabilities = [
                  SupportsNumericLiterals       = true, 
                  SupportsStringLiterals        = true, 

                  SupportsOdbcDateLiterals      = true, 

                  SupportsOdbcTimeLiterals      = true, 

                  SupportsOdbcTimestampLiterals = true

                ]

              ]
, 

            funcs = defaultConfig[SQLGetFunctions]
              &
 [ SQLGetFunctions = [ SQL_API_SQLBINDPARAMETER = false ] ]

          in

            defaultConfig & caps & funcs

        else

          defaultConfig
, 

      withEscape = 
        if
 ( Config_StringLiterateEscapeCharacters <> null ) then
          let
            caps = withParams[SqlCapabilities]
              &
 [
                SqlCapabilities = [
                  StringLiteralEscapeCharacters = Config_StringLiterateEscapeCharacters

                ]

              ]

          in

            withParams & caps

        else

          withParams
, 

      withTop = 
        let

          caps = withEscape[SqlCapabilities]
            &
 [ SqlCapabilities = [ SupportsTop = Config_SupportsTop ] ]

        in

          withEscape & caps
, 

      withCastOrConvert = 
        if
 ( Config_UseCastInsteadOfConvert = true ) then
          let
            caps = withTop[SQLGetFunctions]
              &
 [ SQLGetFunctions = [ SQL_CONVERT_FUNCTIONS = 0x2 /* SQL_FN_CVT_CAST */ ] ]

          in

            withTop & caps

        else

          withTop
, 

      withSqlConformance = 
        if
 ( Config_SqlConformance <> null ) then
          let
            caps = withCastOrConvert[SQLGetInfo]
              &
 [ SQLGetInfo = [ SQL_SQL_CONFORMANCE = Config_SqlConformance ] ]

          in

            withCastOrConvert & caps

        else

          withCastOrConvert

    in

      withSqlConformance
;

  //
  // Load common library functions
  //
  Extension.LoadFunction = ( name as text ) =>
    let
      binary = Extension.Contents ( name ), 
      asText = Text.FromBinary ( binary )

    in

      Expression.Evaluate ( asText, #shared )
;

  // Diagnostics module contains multiple functions. We can take the ones we need.
  // Loaded as a record of helper functions from the packaged .pqm file.
  Diagnostics = Extension.LoadFunction ( "Diagnostics.pqm" )
;

  Diagnostics.LogValue = 
    if
 ( EnableTraceOutput ) then
      Diagnostics[LogValue]

    else

      ( prefix, value ) => value
;

  // OdbcConstants contains numeric constants from the ODBC header files, and a
  // helper function to create bitfield values.
  ODBC = Extension.LoadFunction ( "OdbcConstants.pqm" )
;

  // Expose the constants and bitfield helpers
  Flags = ODBC[Flags]
;

  // String-function bitmask constants (SQL_FN_STR_*).
  SQL_FN_STR = ODBC[SQL_FN_STR];

  // SQL conformance level constants (SQL_SC_*).
  SQL_SC = ODBC[SQL_SC];

  // GROUP BY capability constants (SQL_GB_*).
  SQL_GB = ODBC[SQL_GB];

  // Numeric-function bitmask constants (SQL_FN_NUM_*).
  SQL_FN_NUM = ODBC[SQL_FN_NUM];

HiveSample.query.pq

// Tested with Hortonworks Sandbox
// Drills HIVE database -> foodmart schema -> customer table and previews 5 rows.
let
  source        = HiveSample.Contents ( "127.0.0.1", 10500 ),
  hiveDatabase  = source{[ Name = "HIVE", Kind = "Database" ]}[Data],
  foodmart      = hiveDatabase{[ Name = "foodmart", Kind = "Schema" ]}[Data],
  customerTable = foodmart{[ Name = "customer", Kind = "Table" ]}[Data]
in
  Table.FirstN ( customerTable, 5 )

ImpalaODBC.pq

section ImpalaODBC;
  // Default Impala port, applied by GetAddress when the server string
  // does not carry an explicit ":port" suffix.
  DefaultPort = 21050;
  [ DataSource.Kind = "ImpalaODBC", Publish = "ImpalaODBC.UI" ]
  shared
 ImpalaODBC.Databases = ( server as text, optional options as record ) as table =>
    let
      Address = GetAddress ( server ), 
      HostAddress = Address[Host], 

      HostPort = Address[Port], 

      ConnectionString = [
        Driver = "Cloudera Impala ODBC Driver", 
        Host = HostAddress, 

        Port = HostPort, 

        // We need a database name that doesn't exist in the server, because if the user doesn't have access to
        // "default", it throws an exception. Specifying a DB that doesn't exist works fine, though.
        Database = "DoesNotExist9A8CF2764FB34AECA572E2789EB6B2A2", 
        UseUnicodeSqlCharacterTypes = 1

      ]
, 

      CommonOptions = [
        CredentialConnectionString = GetCredentialConnectionString(), 
        ClientConnectionPooling    = true, 

        OnError                    = OnError

      ]
, 

      OdbcDatasource = Odbc.DataSource (
        ConnectionString, 
        [
          HierarchicalNavigation = true, 
          TolerateConcatOverflow = true, 

          SqlCapabilities = [
            SupportsTop                   = true, 
            Sql92Conformance              = 8, 

            SupportsNumericLiterals       = true, 

            SupportsStringLiterals        = true, 

            SupportsOdbcDateLiterals      = true, 

            SupportsOdbcTimeLiterals      = true, 

            SupportsOdbcTimestampLiterals = true

          ]
, 

          SQLGetFunctions = [
            // Disable using parameters in the queries that get generated.
            // We enable numeric and string literals which should enable literals for all constants.
            SQL_API_SQLBINDPARAMETER = false

          ]

        ]

          &
 CommonOptions

      )
, 

      ComplexColumnsRemoved = RemoveComplexColumnsFromNavigation (
        OdbcDatasource{[ Name = "IMPALA", Kind = "Database" ]}[Data]

      )

    in

      ComplexColumnsRemoved
;

  // Sets the connection string properties related to authentication.
  // The set of connection string properties will vary between drivers.
  // Please check the ODBC driver's documentation for the exact properties to use.
  // For Impala, we set:
  // AuthMech: integer value indicating the auth type (Anonymous, Kerberos/Windows, UsernamePassword)
  // UID: user ID (when using UsernamePassword auth)
  // PWD: password (when using UsernamePassword auth)
  // UseOnlySSPI: bool (1/0) required when using Kerberos/Windows auth
  // SSL: bool (1/0) indicating whether to connect using SSL
  // UseSystemTrustStore: bool (1/0) should match the value for SSL
  GetCredentialConnectionString = () as record =>
    let
      Credential = Extension.CurrentCredential(), 
      AuthKind = Credential[AuthenticationKind], 

      AuthMech.Anonymous = 0, 

      AuthMech.Kerberos = 1, 

      AuthMech.UsernamePassword = 3, 

      ConnectionString = 
        if
 AuthKind = "Implicit" or AuthKind = "Anonymous" then
          [ AuthMech = AuthMech.Anonymous ]

        else
 if AuthKind = "Windows" then
          [ AuthMech = AuthMech.Kerberos, UseOnlySSPI = 1 ]

        else
 if AuthKind = "UsernamePassword" then
          [
            AuthMech = AuthMech.UsernamePassword, 
            UID      = Credential[Username], 

            PWD      = Credential[Password]

          ]

        else

          // Unknown/unexpected auth kind, return "not implemented" as an error
          ...
, 

      // This logic is for data sources that optional SSL/encrypted connections.
      // When SupportsEncryption is set to true on the data source kind record,
      // Power Query will try to connect using SSL. If that fails, the connector
      // should return Extension.CredentialError(Credential.EncryptionNotSupported)
      // to indicate that encryption isn't enabled for this source. If the user then
      // chooses to establish an unencrypted connection, Credential[EncryptConnection]
      // will be set to false on the subsequent connection attempt.
      EncryptConnection = Credential[EncryptConnection]?, 
      SSL = if EncryptConnection = null or EncryptConnection = true then 1 else 0

    in

      ConnectionString & [ SSL = SSL, UseSystemTrustStore = SSL ]
;

  RemoveComplexColumns = ( data as table ) as table =>
    let
      SchemaTable = Table.Schema ( data ), 
      ComplexColumnNames = Table.SelectRows (
        SchemaTable, 
        each Text.Contains ( [NativeTypeName], "<" )

      )
[Name]
, 

      ComplexColumnsRemoved = 
        if
 List.Count ( ComplexColumnNames ) = Table.RowCount ( SchemaTable ) then
          error [ Reason = "DataSource.Error", Message = "No Scalar Columns available" ]

        else

          Table.RemoveColumns ( data, ComplexColumnNames )

    in

      ComplexColumnsRemoved
;

  RemoveComplexColumnsFromNavigation = ( source as table ) as table =>
    let
      TableLevelNavigationTableType = Value.Type ( source{0}[Data] ), 
      TransformedTable = Table.TransformColumns (
        source, 
        {
          {
            "Data", 
            ( data ) =>
              Value.ReplaceType (
                Table.TransformColumns (
                  data, 
                  { { "Data", ( rawTable ) => RemoveComplexColumns ( rawTable ) } }

                )
, 

                TableLevelNavigationTableType

              )

          }

        }

      )

    in

      Value.ReplaceType ( TransformedTable, Value.Type ( source ) )
;

  OnError = ( errorRecord as record ) =>
    let
      OdbcError = errorRecord[Detail][OdbcErrors]{0}, 
      OdbcErrorMessage = OdbcError[Message], 

      OdbcErrorCode = OdbcError[NativeError], 

      HasCredentialError = errorRecord[Detail]
        <>
 null

        and
 errorRecord[Detail][OdbcErrors]? <> null

        and
 Text.Contains ( OdbcErrorMessage, "[ThriftExtension]" )

        and
 OdbcErrorCode <> 0

        and
 OdbcErrorCode <> 7
, 

      IsSSLError = OdbcErrorCode = 6

    in

      if HasCredentialError then
        if IsSSLError then
          error Extension.CredentialError ( Credential.EncryptionNotSupported )

        else

          error Extension.CredentialError ( Credential.AccessDenied, OdbcErrorMessage )

      else

        error errorRecord
;

  GetAddress = ( server as text ) as record =>
    let
      Address = Uri.Parts ( "http://" & server ), 
      BadServer = Address[Host]
        =
 ""

        or
 Address[Scheme] <> "http"

        or
 Address[Path] <> "/"

        or
 Address[Query] <> []

        or
 Address[Fragment] <> ""

        or
 Address[UserName] <> ""

        or
 Address[Password] <> ""
, 

      Port = 
        if
 Address[Port] = 80 and not Text.EndsWith ( server, ":80" ) then
          DefaultPort

        else

          Address[Port]
, 

      Host = Address[Host], 

      Result = [ Host = Host, Port = Port ]

    in

      if BadServer then error "Invalid server name" else Result
;

  ImpalaODBC = [
    Authentication = [ Implicit = [], UsernamePassword = [], Windows = [] ], 
    // Indicate optional SSL requirement
    SupportsEncryption = true

  ]
;

  ImpalaODBC.UI = [
    Beta                = true, 
    Category            = "Database", 

    ButtonText          = { "ImpalaODBC Sample", "ImpalaODBC Sample" }, 

    SupportsDirectQuery = true

  ]
;

ImpalaODBC.query.pq

// Use this file to write queries to test your data connector
let
  databases = ImpalaODBC.Databases ( "localhost" )
in
  databases

MyGraph.pq

section MyGraph;
  //
  // OAuth configuration settings
  //
  // TODO: set AAD client ID value in the client_id file
  client_id = Text.FromBinary ( Extension.Contents ( "client_id" ) )
;

  // Standard Power BI OAuth redirect page; must match the app registration.
  redirect_uri = "https://oauth.powerbi.com/views/oauthredirect.html";

  // AAD v2.0 endpoints ("organizations" tenant).
  token_uri = "https://login.microsoftonline.com/organizations/oauth2/v2.0/token";

  authorize_uri = "https://login.microsoftonline.com/organizations/oauth2/v2.0/authorize";

  logout_uri = "https://login.microsoftonline.com/logout.srf";

  // Suggested OAuth dialog dimensions in pixels.
  // NOTE(review): these constants are not referenced anywhere below —
  // StartLogin returns hardcoded dimension literals instead; verify intent.
  windowWidth = 720;

  windowHeight = 1024;

  // The "offline_access" scope is required to receive a refresh token value. It is added
  // separately from the Graph scopes. Please see
  // https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-permissions-and-consent#offline_access
  //
  // For more information on available Graph scopes, please see
  // https://developer.microsoft.com/en-us/graph/docs/authorization/permission_scopes
  scope_prefix = "https://graph.microsoft.com/"
;

  scopes = {
    "User.ReadWrite", 
    "Contacts.Read", 

    "User.ReadBasic.All", 

    "Calendars.ReadWrite", 

    "Mail.ReadWrite", 

    "Mail.Send", 

    "Contacts.ReadWrite", 

    "Files.ReadWrite", 

    "Tasks.ReadWrite", 

    "People.Read", 

    "Notes.ReadWrite.All", 

    "Sites.Read.All"

  }
;

  //
  // Exported function(s)
  //
  [ DataSource.Kind = "MyGraph", Publish = "MyGraph.UI" ]
  shared
 MyGraph.Feed = () =>
    let
      source = OData.Feed (
        "https://graph.microsoft.com/v1.0/me/", 
        null, 

        [ ODataVersion = 4, MoreColumns = true ]

      )

    in

      source
;

  //
  // Data Source definition
  //
  MyGraph = [
    TestConnection = ( dataSourcePath ) => { "MyGraph.Feed" }, 
    Authentication = [
      OAuth = [
        StartLogin  = StartLogin, 
        FinishLogin = FinishLogin, 

        Refresh     = Refresh, 

        Logout      = Logout

      ]

    ]
, 

    Label = "My Graph Connector"

  ]
;

  //
  // UI Export definition
  //
  MyGraph.UI = [
    Beta            = true, 
    ButtonText      = { "MyGraph.Feed", "Connect to Graph" }, 

    SourceImage     = MyGraph.Icons, 

    SourceTypeImage = MyGraph.Icons

  ]
;

  MyGraph.Icons = [
    Icon16 = {
      Extension.Contents ( "MyGraph16.png" ), 
      Extension.Contents ( "MyGraph20.png" ), 

      Extension.Contents ( "MyGraph24.png" ), 

      Extension.Contents ( "MyGraph32.png" )

    }
, 

    Icon32 = {
      Extension.Contents ( "MyGraph32.png" ), 
      Extension.Contents ( "MyGraph40.png" ), 

      Extension.Contents ( "MyGraph48.png" ), 

      Extension.Contents ( "MyGraph64.png" )

    }

  ]
;

  //
  // OAuth implementation
  //
  // See the following links for more details on AAD/Graph OAuth:
  // * https://docs.microsoft.com/en-us/azure/active-directory/active-directory-protocols-oauth-code
  // * https://graph.microsoft.io/en-us/docs/authorization/app_authorization
  //
  // StartLogin builds a record containing the information needed for the client
  // to initiate an OAuth flow. Note for the AAD flow, the display parameter is
  // not used.
  //
  // resourceUrl: Derived from the required arguments to the data source function
  // and is used when the OAuth flow requires a specific resource to
  // be passed in, or the authorization URL is calculated (i.e. when
  // the tenant name/ID is included in the URL). In this example, we
  // are hardcoding the use of the "common" tenant, as specified by
  // the 'authorize_uri' variable.
  // state: Client state value we pass through to the service.
  // display: Used by certain OAuth services to display information to the
  // user.
  //
  // Returns a record containing the following fields:
  // LoginUri: The full URI to use to initiate the OAuth flow dialog.
  // CallbackUri: The return_uri value. The client will consider the OAuth
  // flow complete when it receives a redirect to this URI. This
  // generally needs to match the return_uri value that was
  // registered for your application/client.
  // WindowHeight: Suggested OAuth window height (in pixels).
  // WindowWidth: Suggested OAuth window width (in pixels).
  // Context: Optional context value that will be passed in to the FinishLogin
  // function once the redirect_uri is reached.
  //
  StartLogin = ( resourceUrl, state, display ) =>
    let
      authorizeUrl = authorize_uri
        &
 "?"

        &
 Uri.BuildQueryString (
          [
            client_id     = client_id, 
            redirect_uri  = redirect_uri, 

            state         = state, 

            scope         = "offline_access " & GetScopeString ( scopes, scope_prefix ), 

            response_type = "code", 

            response_mode = "query", 

            login         = "login"

          ]

        )

    in

      [
        LoginUri     = authorizeUrl, 
        CallbackUri  = redirect_uri, 

        WindowHeight = 720, 

        WindowWidth  = 1024, 

        Context      = null

      ]
;

  // FinishLogin is called when the OAuth flow reaches the specified redirect_uri.
  // Note for the AAD flow, the context and state parameters are not used.
  //
  // context: The value of the Context field returned by StartLogin. Use this to
  // pass along information derived during the StartLogin call (such as
  // tenant ID)
  // callbackUri: The callbackUri containing the authorization_code from the service.
  // state: State information that was specified during the call to StartLogin.
  FinishLogin = ( context, callbackUri, state ) =>
    let
      // parse the full callbackUri, and extract the Query string
      parts = Uri.Parts ( callbackUri )[Query], 
      // if the query string contains an "error" field, raise an error
      // otherwise call TokenMethod to exchange our code for an access_token
      result = 
        if
 ( Record.HasFields ( parts, { "error", "error_description" } ) ) then
          error Error.Record ( parts[error], parts[error_description], parts )

        else

          TokenMethod ( "authorization_code", "code", parts[code] )

    in

      result
;

  // Called when the access_token has expired, and a refresh_token is available.
  //
  // resourceUrl: the data source path (unused here).
  // refresh_token: the refresh token issued with the original access token.
  Refresh = ( resourceUrl, refresh_token ) =>
    TokenMethod ( "refresh_token", "refresh_token", refresh_token )
;

  // Returns the URL the client should navigate to in order to sign out.
  Logout = ( token ) => logout_uri;

  // grantType: Maps to the "grant_type" query parameter.
  // tokenField: The name of the query parameter to pass in the code.
  // code: Is the actual code (authorization_code or refresh_token) to send to the service.
  TokenMethod = ( grantType, tokenField, code ) =>
    let
      queryString = [
        client_id    = client_id, 
        scope        = "offline_access " & GetScopeString ( scopes, scope_prefix ), 

        grant_type   = grantType, 

        redirect_uri = redirect_uri

      ]
, 

      queryWithCode = Record.AddField ( queryString, tokenField, code ), 

      tokenResponse = Web.Contents (
        token_uri, 
        [
          Content = Text.ToBinary ( Uri.BuildQueryString ( queryWithCode ) ), 
          Headers = [
            #"Content-type" = "application/x-www-form-urlencoded", 
            #"Accept"       = "application/json"

          ]
, 

          ManualStatusHandling = { 400 }

        ]

      )
, 

      body = Json.Document ( tokenResponse ), 

      result = 
        if
 ( Record.HasFields ( body, { "error", "error_description" } ) ) then
          error Error.Record ( body[error], body[error_description], body )

        else

          body

    in

      result
;

  //
  // Helper Functions
  //
  Value.IfNull = ( a, b ) => if a <> null then a else b
;

  GetScopeString = ( scopes as list, optional scopePrefix as text ) as text =>
    let
      prefix    = Value.IfNull ( scopePrefix, "" ), 
      addPrefix = List.Transform ( scopes, each prefix & _ ), 

      asText    = Text.Combine ( addPrefix, " " )

    in

      asText
;

MyGraph.query.pq

// Smoke test: invoke the connector's only entry point (triggers the OAuth flow).
MyGraph.Feed()

NavigationTable.pq

// This file contains your Data Connector logic
section NavigationTable;
  // Flat navigation table: four leaf rows, two static tables, one lazily
  // evaluated table and one function entry.
  [ DataSource.Kind = "NavigationTable", Publish = "NavigationTable.Publish" ]
  shared NavigationTable.Simple = () as table =>
    let
      entries = #table (
        { "Name", "Key", "Data", "ItemKind", "ItemName", "IsLeaf" },
        {
          { "Item1", "item1", #table ( { "Column1" }, { { "Item1" } } ), "Table", "Table", true },
          { "Item2", "item2", #table ( { "Column1" }, { { "Item2" } } ), "Table", "Table", true },
          { "Item3", "item3", FunctionCallThatReturnsATable(), "Table", "Table", true },
          { "MyFunction", "myfunction", AnotherFunction.Contents(), "Function", "Function", true }
        }
      )
    in
      Table.ToNavigationTable ( entries, { "Key" }, "Name", "Data", "ItemKind", "ItemName", "IsLeaf" );

  [ DataSource.Kind = "NavigationTable", Publish = "NavigationTable.Publish2" ]
  shared
 NavigationTable.Nested = () as table =>
    let
      objects = #table (
        { "Name", "Key", "Data", "ItemKind", "ItemName", "IsLeaf" }, 
        {
          { "Nested A", "n1", CreateNavTable ( "AAA" ), "Database", "Database", false }, 
          { "Nested B", "n2", CreateNavTable ( "BBB" ), "Folder", "Folder", false }, 

          { "Nested C", "n3", CreateNavTable ( "CCC" ), "Sheet", "Sheet", false }

        }

      )
, 

      NavTable = Table.ToNavigationTable (
        objects, 
        { "Key" }, 

        "Name", 

        "Data", 

        "ItemKind", 

        "ItemName", 

        "IsLeaf"

      )

    in

      NavTable
;

  [ DataSource.Kind = "NavigationTable", Publish = "NavigationTable.Publish3" ]
  shared
 NavigationTable.Icons = () as table =>
    let
      // Dynamically builds a navigation table that uses all possible item kind icons
      itemKinds = {
        "Feed", 
        "Cube", 

        "CubeDatabase", 

        "CubeView", 

        "CubeViewFolder", 

        "Database", 

        "DatabaseServer", 

        "Dimension", 

        "Table", 

        "Folder", 

        "View", 

        "Sheet", 

        "Subcube", 

        "DefinedName", 

        "Record", 

        "Function"

      }
, 

      asTable = Table.FromList ( itemKinds, Splitter.SplitByNothing() ), 

      rename = Table.RenameColumns ( asTable, { { "Column1", "Name" } } ), 

      // Add Data as a calculated column
      withData = Table.AddColumn ( rename, "Data", each CreateNavTable ( [Name] ), type table ), 
      // Add ItemKind and ItemName as fixed text values
      withItemKind = Table.AddColumn ( withData, "ItemKind", each [Name], type text ), 
      withItemName = Table.AddColumn ( withItemKind, "ItemName", each [Name], type text ), 

      // Indicate that the node should not be expandable
      withIsLeaf = Table.AddColumn ( withItemName, "IsLeaf", each false, type logical ), 
      // Generate the nav table
      navTable = Table.ToNavigationTable (
        withIsLeaf, 
        { "Name" }, 

        "Name", 

        "Data", 

        "ItemKind", 

        "ItemName", 

        "IsLeaf"

      )

    in

      navTable
;

  // Data Source Kind description
  NavigationTable = [ Authentication = [ Implicit = [] ], Label = "Navigation Table Sample" ]
;

  // Data Source UI publishing description
  NavigationTable.Publish = [
    Beta       = true, 
    Category   = "Other", 

    ButtonText = { "NavTable Simple", "NavTable Simple" }

  ]
;

  // Data Source UI publishing description
  NavigationTable.Publish2 = [
    Beta       = true, 
    Category   = "Other", 

    ButtonText = { "NavTable Nested", "NavTable Nested" }

  ]
;

  // Data Source UI publishing description
  NavigationTable.Publish3 = [
    Beta       = true, 
    Category   = "Other", 

    ButtonText = { "NavTable Icons", "NavTable Icons" }

  ]
;

  // Implementation functions

  // Demonstrates a [Data] cell that is produced by a (lazily evaluated)
  // function call rather than a literal table.
  FunctionCallThatReturnsATable = () as table =>
    #table ( { "DynamicColumn" }, { { "Dynamic Value" } } )
;

  // Backs the "Function" row in NavigationTable.Simple; invoking it from the
  // navigator returns a text value.
  AnotherFunction.Contents = () => "Returns a static string when invoked.";

  CreateNavTable = ( message as text ) as table =>
    let
      objects = #table (
        { "Name", "Key", "Data", "ItemKind", "ItemName", "IsLeaf" }, 
        {
          { "Item1", "item1", #table ( { "Column1" }, { { message } } ), "Table", "Table", true }, 
          { "Item2", "item2", #table ( { "Column1" }, { { message } } ), "Table", "Table", true }

        }

      )
, 

      NavTable = Table.ToNavigationTable (
        objects, 
        { "Key" }, 

        "Name", 

        "Data", 

        "ItemKind", 

        "ItemName", 

        "IsLeaf"

      )

    in

      NavTable
;

  // Common library code
  Table.ToNavigationTable = (
    table as table, 
    keyColumns as list, 

    nameColumn as text, 

    dataColumn as text, 

    itemKindColumn as text, 

    itemNameColumn as text, 

    isLeafColumn as text

  )
 as table =>
    let
      tableType = Value.Type ( table ), 
      newTableType = Type.AddTableKey ( tableType, keyColumns, true )
        meta
 [
          NavigationTable.NameColumn     = nameColumn, 
          NavigationTable.DataColumn     = dataColumn, 

          NavigationTable.ItemKindColumn = itemKindColumn, 

          Preview.DelayColumn            = itemNameColumn, 

          NavigationTable.IsLeafColumn   = isLeafColumn

        ]
, 

      navigationTable = Value.ReplaceType ( table, newTableType )

    in

      navigationTable
;

NavigationTable.query.pq

// Use this file to write queries to test your data connector
let
  iconsNav = NavigationTable.Icons()
in
  iconsNav

OpenApiSample.pq

section OpenApiSample;
  // Options record shared by every OpenApi.Document call in this section.
  DefaultOptions = [
    // The built-in credential handling for OpenApi.Document only works
    // with Basic (UsernamePassword) auth. All other types should be handled
    // explicitly using the ManualCredentials option.
    //
    // In this sample, all of the calls we'll make will work anonymously.
    // We can force anonymous access by setting ManualCredentials to true, and then
    // not setting any additional request headers/parameters.
    //
    ManualCredentials = true, 
    // The returned data will match the schema defined in the swagger file.
    // This means that additional fields and object types that don't have explicit
    // properties defined will be ignored. To see all results, we set the IncludeMoreColumns
    // option to true. Any fields found in the response that aren't found in the schema will
    // be grouped under this column in a record value.
    //
    IncludeMoreColumns = true, 
    // When IncludeExtensions is set to true, vendor extensions in the swagger ("x-*" fields)
    // will be included as meta values on the function.
    //
    IncludeExtensions = true

  ]
;

  [ DataSource.Kind = "OpenApiSample", Publish = "OpenApiSample.Petstore.Publish" ]
  shared
 OpenApiSample.Petstore = () =>
    let
      // Pull the latest swagger definition from the site
      swaggerUrl = "http://petstore.swagger.io/v2/swagger.json", 
      swagger = Web.Contents ( swaggerUrl ), 

      // OpenApi.Document will return a navigation table
      nav = OpenApi.Document ( swagger, DefaultOptions )

    in

      nav
;

  [ DataSource.Kind = "OpenApiSample", Publish = "OpenApiSample.ApisGuru.Publish" ]
  shared
 OpenApiSample.ApisGuru = () =>
    let
      // load the swagger definition from the bundled resource file
      Swagger = Extension.Contents ( "apisGuru.json" ), 
      // OpenApi.Document returns a navigation table. Each GET operation defined
      // in the swagger document is becomes a row in the nav table. Each row is a
      // function that can be invoked by the user.
      defaultNav = OpenApi.Document ( Swagger, DefaultOptions ), 
      // The Apis.Guru swagger has a listAPIs function - we'll get a reference to that
      listAPIs = defaultNav{[ Name = "listAPIs" ]}[Data], 
      allAPIs = listAPIs(), 

      // format the results as the nav table we will return
      nav = ApiGuru.FormatApiList ( allAPIs )

    in

      nav
;

  // Process the results of the ApisGuru listAPIs function to return a
  // navigation table with all published APIs.
  ApiGuru.FormatApiList = ( results as record ) as table =>
    if ( results[MoreColumns]? = null ) then
      error "[MoreColumns] not found in result"

    else

      let
        // expand down to the level that provides us with the swagger URL
        MoreColumns = results[MoreColumns], 
        toTable = Record.ToTable ( MoreColumns ), 

        expandAPI = Table.ExpandRecordColumn (
          toTable, 
          "Value", 

          { "added", "preferred", "versions" }

        )
, 

        getPreferredVersion = Table.AddColumn (
          expandAPI, 
          "api", 

          each try Record.Field ( [versions], [preferred] ) otherwise null

        )
, 

        expandSwaggerInfo = Table.ExpandRecordColumn (
          getPreferredVersion, 
          "api", 

          { "info", "swaggerUrl", "updated" }

        )
, 

        // the [Data] column of our navigator will be another call to OpenApi.Document, passing in the swagger for this endpoint
        withData = Table.AddColumn (
          expandSwaggerInfo, 
          "Data", 

          each OpenApi.Document ( Web.Contents ( [swaggerUrl] ), DefaultOptions ), 

          Table.Type

        )
, 

        withKind = Table.AddColumn ( withData, "ItemKind", each "Feed", Text.Type ), 

        withName = Table.AddColumn ( withKind, "ItemName", each "Table", Text.Type ), 

        withLeaf = Table.AddColumn ( withName, "IsLeaf", each false, Logical.Type ), 

        asNav = Table.ToNavigationTable (
          withLeaf, 
          { "Name" }, 

          "Name", 

          "Data", 

          "ItemKind", 

          "ItemName", 

          "IsLeaf"

        )

      in

        asNav
;

  // Data Source Kind description
  OpenApiSample = [ Authentication = [ Anonymous = [] ] ]
;

  // Data Source UI publishing description
  OpenApiSample.ApisGuru.Publish = [
    Beta         = true, 
    Category     = "Other", 

    ButtonText   = { "Swagger sample - APIs Guru", "Swagger sample - APIs Guru" }, 

    LearnMoreUrl = "https://powerbi.microsoft.com/"

  ]
;

  // Data Source UI publishing description
  OpenApiSample.Petstore.Publish = [
    Beta         = true, 
    Category     = "Other", 

    ButtonText   = { "Swagger sample - Petstore", "Swagger sample - Petstore" }, 

    LearnMoreUrl = "https://powerbi.microsoft.com/"

  ]
;

  //
  // Common functions
  //
  Table.ToNavigationTable = (
    table as table, 
    keyColumns as list, 

    nameColumn as text, 

    dataColumn as text, 

    itemKindColumn as text, 

    itemNameColumn as text, 

    isLeafColumn as text

  )
 as table =>
    let
      tableType = Value.Type ( table ), 
      newTableType = Type.AddTableKey ( tableType, keyColumns, true )
        meta
 [
          NavigationTable.NameColumn     = nameColumn, 
          NavigationTable.DataColumn     = dataColumn, 

          NavigationTable.ItemKindColumn = itemKindColumn, 

          Preview.DelayColumn            = itemNameColumn, 

          NavigationTable.IsLeafColumn   = isLeafColumn

        ]
, 

      navigationTable = Value.ReplaceType ( table, newTableType )

    in

      navigationTable
;

OpenApiSample.query.pq

// Use this file to write queries to test your data connector
let
  output = OpenApiSample.ApisGuru()
in
  output

RedshiftODBC.pq

section RedshiftODBC;

  // When set to true, additional trace information will be written out to the User log.
  // Tracing is done through a call to Diagnostics.LogValue(); when EnableTraceOutput is
  // false, the call becomes a no-op and simply returns the original value.
  // NOTE(review): this should be set to false before release.
  EnableTraceOutput = true;

  Config_DriverName = "Amazon Redshift (x86)";

  // null, 1, 2, 4, 8
  Config_SqlConformance = ODBC[SQL_SC][SQL_SC_SQL92_FULL];

  // true, false
  Config_DefaultUsernamePasswordHandling = true
;

  // true, false, null
  Config_UseParameterBindings = true
;

  // ex. { "\" }
  Config_StringLiterateEscapeCharacters = { "\" }
;

  // true, false, null
  Config_UseCastInsteadOfConvert = true
;

  // true, false
  Config_SupportsTop = true
;

  // true, false
  Config_EnableDirectQuery = true
;

  // This is a work around for the lack of available conversion functions in the driver.
  ImplicitTypeConversions = #table (
    { "Type1", "Type2", "ResultType" }, 
    {
      // 'bpchar' char is added here to allow it to be converted to 'char' when compared against constants.
      { "bpchar", "char", "char" }

    }

  )
;

  [ DataSource.Kind = "RedshiftODBC", Publish = "RedshiftODBC.UI" ]
  shared
 RedshiftODBC.Database = ( server as text, database as text, optional options as record ) as table =>
    let
      ConnectionString = GetAddress ( server )
        &
 [ Driver = Config_DriverName, UseUnicode = "yes", Database = database ]
, 

      Credential = Extension.CurrentCredential(), 

      encryptionEnabled = Credential[EncryptConnection]? = true, 

      CredentialConnectionString = [
        SSLMode     = if encryptionEnabled then "verify-full" else "prefer", 
        UID         = Credential[Username], 

        PWD         = Credential[Password], 

        BoolsAsChar = 0, 

        MaxVarchar  = 65535

      ]
, 

      defaultConfig = BuildOdbcConfig(), 

      SqlCapabilities = defaultConfig[SqlCapabilities]
        &
 [
          // place custom overrides here
          GroupByCapabilities = ODBC[SQL_GB][SQL_GB_NO_RELATION], 
          FractionalSecondsScale = 3

        ]
, 

      SQLGetInfo = defaultConfig[SQLGetInfo] & [], 

      SQLGetTypeInfo = ( types ) =>
        let
          original = 
            if
 ( EnableTraceOutput <> true ) then
              types

            else

              let
                // Outputting the entire table might be too large, and result in the value being truncated.
                // We can output a row at a time instead with Table.TransformRows()
                rows = Table.TransformRows (
                  types, 
                  each Diagnostics.LogValue ( "SQLGetTypeInfo " & _[TYPE_NAME], _ )

                )
, 

                toTable = Table.FromRecords ( rows )

              in

                toTable
, 

          modified = // older versions of the driver were missing an entry for 'bpchar'
          
            if
 ( Table.IsEmpty ( Table.SelectRows ( original, each [TYPE_NAME] = "bpchar" ) ) ) then
              let
                // add the missing bpchar type by copying the 'char' entry and modifying the relevant values
                charRecord = original{[ TYPE_NAME = "char", DATA_TYPE = - 8 ]}, 
                bpChar = charRecord & [ TYPE_NAME = "bpchar", LOCAL_TYPE_NAME = "bpchar" ], 

                finalTable = original & Table.FromRecords ( { bpChar } )

              in

                finalTable

            else

              original
, 

          modified2 = // use of Redshift spectrum/external tables can add a "float" type
          
            if
 ( Table.IsEmpty ( Table.SelectRows ( modified, each [TYPE_NAME] = "float" ) ) ) then
              let
                origRecord = modified{[ TYPE_NAME = "float8", DATA_TYPE = 6 ]}, 
                newRecord  = origRecord & [ TYPE_NAME = "float", LOCAL_TYPE_NAME = "float" ], 

                finalTable = modified & Table.FromRecords ( { newRecord } )

              in

                finalTable

            else

              modified

        in

          Value.ReplaceType ( modified2, Value.Type ( types ) )
, 

      SQLColumns = ( catalogName, schemaName, tableName, columnName, source ) =>
        if ( EnableTraceOutput <> true ) then
          source

        else
 // the if statement conditions will force the values to evaluated/written to diagnostics
        if (
          Diagnostics.LogValue ( "SQLColumns.TableName", tableName )
            <>
 "***"

            and
 Diagnostics.LogValue ( "SQLColumns.ColumnName", columnName ) <> "***"

        )

        then

          let
            // Outputting the entire table might be too large, and result in the value being truncated.
            // We can output a row at a time instead with Table.TransformRows()
            rows = Table.TransformRows ( source, each Diagnostics.LogValue ( "SQLColumns", _ ) ), 
            toTable = Table.FromRecords ( rows )

          in

            Value.ReplaceType ( toTable, Value.Type ( source ) )

        else

          source
, 

      OnError = ( errorRecord as record ) =>
        if Text.Contains ( errorRecord[Message], "password authentication failed" ) then
          error Extension.CredentialError ( Credential.AccessDenied, errorRecord[Message] )

        else
 if encryptionEnabled
          and
 Text.Contains ( errorRecord[Message], "root.crt"" does not exist" )

        then

          error Extension.CredentialError (
            Credential.EncryptionNotSupported, 
            errorRecord[Message]

          )

        else

          error errorRecord
, 

      AstVisitor = [
        // Decimal literals are always of type "numeric" which prevents high range numbers to be used.
        // We therefore wrap float4 and float8 with CAST for that purpose.
        Constant = 
          let

            Cast = ( value, typeName ) =>
              [ Text = Text.Format ( "CAST(#{0} as #{1})", { value, typeName } ) ]
, 

            Visitor = [ float8 = each Cast ( _, "float8" ), float4 = each Cast ( _, "float4" ) ]

          in

            ( typeInfo, ast ) =>
              Record.FieldOrDefault ( Visitor, typeInfo[TYPE_NAME], each null )( ast[Value] )

      ]
, 

      OdbcDatasource = Odbc.DataSource (
        ConnectionString, 
        [
          HierarchicalNavigation = true, 
          HideNativeQuery = true, 

          ClientConnectionPooling = true, 

          ImplicitTypeConversions = ImplicitTypeConversions, 

          OnError = OnError, 

          // These values should be set by previous steps
          CredentialConnectionString = CredentialConnectionString, 
          AstVisitor = AstVisitor, 

          SqlCapabilities = SqlCapabilities, 

          SQLColumns = SQLColumns, 

          SQLGetInfo = SQLGetInfo, 

          SQLGetTypeInfo = SQLGetTypeInfo

        ]

      )
, 

      Database = OdbcDatasource{[ Name = database ]}[Data], 

      RemovedSystemTable = Table.SelectRows ( Database, each [Name] <> "pg_internal" ),  // non-queryable system table

      FixNestedNavigationTables = 
        let

          ColumnType = Type.TableColumn ( Value.Type ( RemovedSystemTable ), "Data" ), 
          AddColumn = Table.AddColumn (
            RemovedSystemTable, 
            "Data2", 

            each FixNavigationTable ( [Data] ), 

            ColumnType

          )
, 

          RemovePreviousColumn = Table.RemoveColumns ( AddColumn, { "Data" } ), 

          RenameColumn = Table.RenameColumns ( RemovePreviousColumn, { { "Data2", "Data" } } )

        in

          RenameColumn
, 

      Fixed = FixNavigationTable ( FixNestedNavigationTables )

    in

      Fixed
;

  GetAddress = ( server as text ) as record =>
    let
      Address = Uri.Parts ( "http://" & server ), 
      Port = 
        if
 Address[Port] = 80 and not Text.EndsWith ( server, ":80" ) then
          []

        else

          [ Port = Address[Port] ]
, 

      Server = [ Server = Address[Host] ], 

      ConnectionString = Server & Port, 

      Result = 
        if
 Address[Host]
          =
 ""

          or
 Address[Scheme] <> "http"

          or
 Address[Path] <> "/"

          or
 Address[Query] <> []

          or
 Address[Fragment] <> ""

          or
 Address[UserName] <> ""

          or
 Address[Password] <> ""

          or
 Text.StartsWith ( server, "http:/", Comparer.OrdinalIgnoreCase )

        then

          error "Invalid server name"

        else

          ConnectionString

    in

      Result
;

  // Fixes navigation table return from ODBC hierarchical navigation
  // to not have Kind as part of the key and to remove columns that return
  // not value for this ODBC driver such as "Description".
  FixNavigationTable = ( table ) =>
    let
      SelectColumns = Table.SelectColumns ( table, { "Name", "Data", "Kind" } ), 
      OriginalType = Value.Type ( SelectColumns ), 

      Type = type table [
        Name = Type.TableColumn ( OriginalType, "Name" ), 
        Data = Type.TableColumn ( OriginalType, "Data" ), 

        Kind = Type.TableColumn ( OriginalType, "Kind" )

      ]
, 

      AddKey = Type.AddTableKey ( Type, { "Name" }, true ), 

      AddMetadata = AddKey
        meta
 [
          NavigationTable.NameColumn = "Name", 
          NavigationTable.DataColumn = "Data", 

          NavigationTable.KindColumn = "Kind", 

          Preview.DelayColumn        = "Data"

        ]
, 

      ReplaceType = Value.ReplaceType ( SelectColumns, AddMetadata )

    in

      ReplaceType
;

  RedshiftODBC = [
    TestConnection = ( dataSourcePath ) =>
      let
        json     = Json.Document ( dataSourcePath ), 
        server   = json[server], 

        database = json[database]

      in

        { "RedshiftODBC.Database", server, database }
, 

    Authentication = [ UsernamePassword = [] ], 

    SupportsEncryption = true

  ]
;

  RedshiftODBC.UI = [
    ButtonText          = { "RedshiftODBC Sample", "RedshiftODBC Sample" }, 
    Category            = "Database", 

    SupportsDirectQuery = true

  ]
;

  // build settings based on configuration variables
  BuildOdbcConfig = () as record =>
    let
      defaultConfig = [ SqlCapabilities = [], SQLGetFunctions = [], SQLGetInfo = [] ], 
      withParams = 
        if
 ( Config_UseParameterBindings = false ) then
          let
            caps = defaultConfig[SqlCapabilities]
              &
 [
                SqlCapabilities = [
                  SupportsNumericLiterals       = true, 
                  SupportsStringLiterals        = true, 

                  SupportsOdbcDateLiterals      = true, 

                  SupportsOdbcTimeLiterals      = true, 

                  SupportsOdbcTimestampLiterals = true

                ]

              ]
, 

            funcs = defaultConfig[SQLGetFunctions]
              &
 [ SQLGetFunctions = [ SQL_API_SQLBINDPARAMETER = false ] ]

          in

            defaultConfig & caps & funcs

        else

          defaultConfig
, 

      withEscape = 
        if
 ( Config_StringLiterateEscapeCharacters <> null ) then
          let
            caps = withParams[SqlCapabilities]
              &
 [
                SqlCapabilities = [
                  StringLiteralEscapeCharacters = Config_StringLiterateEscapeCharacters

                ]

              ]

          in

            withParams & caps

        else

          withParams
, 

      withTop = 
        let

          caps = withEscape[SqlCapabilities]
            &
 [ SqlCapabilities = [ SupportsTop = Config_SupportsTop ] ]

        in

          withEscape & caps
, 

      withCastOrConvert = 
        if
 ( Config_UseCastInsteadOfConvert = true ) then
          let
            caps = withTop[SQLGetFunctions]
              &
 [ SQLGetFunctions = [ SQL_CONVERT_FUNCTIONS = 0x2 /* SQL_FN_CVT_CAST */ ] ]

          in

            withTop & caps

        else

          withTop
, 

      withSqlConformance = 
        if
 ( Config_SqlConformance <> null ) then
          let
            caps = withCastOrConvert[SQLGetInfo]
              &
 [ SQLGetInfo = [ SQL_SQL_CONFORMANCE = Config_SqlConformance ] ]

          in

            withCastOrConvert & caps

        else

          withCastOrConvert

    in

      withSqlConformance
;

  //
  // Load common library functions
  //
  Extension.LoadFunction = ( name as text ) =>
    let
      binary = Extension.Contents ( name ), 
      asText = Text.FromBinary ( binary )

    in

      Expression.Evaluate ( asText, #shared )
;

  // Diagnostics module contains multiple functions. We can take the ones we need.
  Diagnostics = Extension.LoadFunction ( "Diagnostics.pqm" )
;

  Diagnostics.LogValue = 
    if
 ( EnableTraceOutput ) then
      Diagnostics[LogValue]

    else

      ( prefix, value ) => value
;

  // OdbcConstants contains numeric constants from the ODBC header files, and a
  // helper function to create bitfield values.
  ODBC = Extension.LoadFunction ( "OdbcConstants.pqm" )
;

  Odbc.Flags = ODBC[Flags];

RedshiftODBC.query.pq

let
  output = RedshiftODBC.Database ( "server.redshift.amazonaws.com:5439", "database" )
in
  output

Relationships.pq

// This sample shows how to define relationships between tables that
// will be automatically detected in Power BI Desktop.
section Relationships;

  // Base navigation table, no relationships set
  [ DataSource.Kind = "Relationships", Publish = "Relationships.PublishNone" ]
  shared Relationships.None = () =>
    let
      source = #table (
        type table [ Name = Text.Type, Data = Table.Type ],
        { { "Customers", Customers }, { "Orders", Orders }, { "OrderDetails", OrderDetails } }
      )
    in
      CreateNavTable ( source );

  // Relationships set using Table.NestedJoin.
  // This function leaves the joined columns on the Orders table,
  // which makes it easy for a consumer to expand/merge the tables inline.
  [ DataSource.Kind = "Relationships", Publish = "Relationships.PublishNested" ]
  shared Relationships.Nested = () =>
    let
      ordersWithDetail = Table.NestedJoin (
        Orders, { "OrderId" },
        OrderDetails, { "OrderId" },
        "OrderDetails", JoinKind.LeftOuter
      ),
      ordersWithCustomers = Table.NestedJoin (
        ordersWithDetail, { "CustomerId" },
        Customers, { "CustomerId" },
        "Customer", JoinKind.LeftOuter
      ),
      source = #table (
        type table [ Name = Text.Type, Data = Table.Type ],
        {
          { "Customers", Customers },
          { "Orders", ordersWithCustomers },
          { "OrderDetails", OrderDetails }
        }
      )
    in
      CreateNavTable ( source );

  // Relationships set using Table.NestedJoin.
  // This function removes the joined columns, but
  // leaves the relationship metadata in place.
  [ DataSource.Kind = "Relationships", Publish = "Relationships.PublishImplicit" ]
  shared Relationships.Implicit = () =>
    let
      ordersWithDetail = Table.NestedJoin (
        Orders, { "OrderId" },
        OrderDetails, { "OrderId" },
        "OrderDetails", JoinKind.LeftOuter
      ),
      ordersWithCustomers = Table.NestedJoin (
        ordersWithDetail, { "CustomerId" },
        Customers, { "CustomerId" },
        "Customer", JoinKind.LeftOuter
      ),
      // remove the join columns
      removedJoins = Table.RemoveColumns ( ordersWithCustomers, { "OrderDetails", "Customer" } ),
      source = #table (
        type table [ Name = Text.Type, Data = Table.Type ],
        { { "Customers", Customers }, { "Orders", removedJoins }, { "OrderDetails", OrderDetails } }
      )
    in
      CreateNavTable ( source );

  // common function to generate format the results into a nav table.
  // expects table with columns { "Name", "Data" }
  CreateNavTable = ( base as table ) as table =>
    let
      withItemKind = Table.AddColumn ( base, "ItemKind", each "Table", type text ), 
      withItemName = Table.AddColumn ( withItemKind, "ItemName", each "Table", type text ), 

      withIsLeaf = Table.AddColumn ( withItemName, "IsLeaf", each true, type logical ), 

      navTable = Table.ToNavigationTable (
        withIsLeaf, 
        { "Name" }, 

        "Name", 

        "Data", 

        "ItemKind", 

        "ItemName", 

        "IsLeaf"

      )

    in

      navTable
;

  // Static tables for the sample
  Customers = Table.AddKey (
    #table (
      type table [ CustomerId = Int64.Type, Name = Text.Type ], 
      { { 1, "Tom" }, { 2, "Bob" }, { 3, "Mary" } }

    )
, 

    { "CustomerId" }, 

    true

  )
;

  Orders = Table.AddKey (
    #table (
      type table [ OrderId = Int64.Type, CustomerId = Int64.Type, OrderDate = Date.Type ], 
      {
        { 1, 1, #date ( 2018, 1, 1 ) }, 
        { 2, 1, #date ( 2018, 2, 1 ) }, 

        { 3, 1, #date ( 2018, 3, 1 ) }, 

        { 4, 1, #date ( 2018, 4, 1 ) }, 

        { 5, 3, #date ( 2018, 5, 1 ) }, 

        { 6, 3, #date ( 2018, 6, 1 ) }, 

        { 7, 3, #date ( 2018, 7, 1 ) }

      }

    )
, 

    { "OrderId" }, 

    true

  )
;

  OrderDetails = Table.AddKey (
    #table (
      type table [
        OrderId = Int64.Type, 
        LineItem = Int16.Type, 

        Detail = Text.Type, 

        Amount = Currency.Type

      ]
, 

      {
        { 1, 1, "Book", 5.30 }, 
        { 1, 2, "Book", 3.99 }, 

        { 1, 3, "Supplies", 89.99 }, 

        { 2, 1, "Book", 5.30 }, 

        { 3, 1, "Book", 2.00 }, 

        { 3, 2, "Supplies", 1.54 }, 

        { 4, 1, "Book", 2.00 }, 

        { 5, 1, "Book", 2.00 }, 

        { 6, 1, "Book", 2.00 }, 

        { 7, 1, "Book", 2.00 }

      }

    )
, 

    { "OrderId", "LineItem" }, 

    true

  )
;

  // Data Source Kind description
  Relationships = [ Authentication = [ Anonymous = [] ] ]
;

  Relationships.PublishNone = [
    Beta         = true, 
    Category     = "Other", 

    ButtonText   = { "Relationship - None", "Relationship - None" }, 

    LearnMoreUrl = "https://powerbi.microsoft.com/"

  ]
;

  Relationships.PublishNested = [
    Beta         = true, 
    Category     = "Other", 

    ButtonText   = { "Relationship - Nested", "Relationship - Nested" }, 

    LearnMoreUrl = "https://powerbi.microsoft.com/"

  ]
;

  Relationships.PublishImplicit = [
    Beta         = true, 
    Category     = "Other", 

    ButtonText   = { "Relationship - Implicit", "Relationship - Implicit" }, 

    LearnMoreUrl = "https://powerbi.microsoft.com/"

  ]
;

  //
  // Load common library functions
  //
  // TEMPORARY WORKAROUND until we're able to reference other M modules
  Extension.LoadFunction = ( name as text ) =>
    let
      binary = Extension.Contents ( name ), 
      asText = Text.FromBinary ( binary )

    in

      Expression.Evaluate ( asText, #shared )
;

  Table.ToNavigationTable = Extension.LoadFunction ( "Table.ToNavigationTable.pqm" );

Relationships.query.pq

// Use this file to write queries to test your data connector
let
  output = Relationships.Implicit()
in
  output

SnowflakeODBC.pq

section SnowflakeODBC;

  // When set to true, additional trace information will be written out to the User log.
  // Tracing is done through a call to Diagnostics.LogValue(); when EnableTraceOutput is
  // false, the call becomes a no-op and simply returns the original value.
  EnableTraceOutput = false;

  [ DataSource.Kind = "SnowflakeODBC", Publish = "SnowflakeODBC.UI" ]
  shared
 SnowflakeODBC.Databases = (
    server as text, 
    warehouse as text, 

    optional options as nullable record

  )
 as table =>
    let
      Host = GetHost ( server ), 
      ConnectionTimeoutOption = GetTimeoutOption ( options, "ConnectionTimeout" ), 

      QueryTimeoutOption = GetTimeoutOption ( options, "CommandTimeout" ), 

      BaseConnectionString = 
        if
 options
          <>
 null

          and
 List.Count ( List.Difference ( Record.FieldNames ( options ), ValidOptions ) ) > 0

        then

          error Error.Record ( "Expression.Error", "InvalidOptionsKey" )

        else

          [ driver = "SnowflakeDSIIDriver", server = Host, warehouse = warehouse ]
, 

      WithLoginTimeoutOption = AddConnectionStringOption (
        BaseConnectionString, 
        "login_timeout", 

        ConnectionTimeoutOption

      )
, 

      WithNetworkTimeoutOption = AddConnectionStringOption (
        WithLoginTimeoutOption, 
        "network_timeout", 

        ConnectionTimeoutOption

      )
, 

      ConnectionString = AddConnectionStringOption (
        WithNetworkTimeoutOption, 
        "query_timeout", 

        QueryTimeoutOption

      )
, 

      Options = [
        // Snowflake supports "LIMIT <take> [OFFSET <skip>]". While supported in the documented grammar,
        // using OFFSET without LIMIT produces a syntax error so we can't use LimitClauseKind.LimitOffset.
        LimitClauseKind = LimitClauseKind.Limit, 
        AstVisitor = [
          Constant = 
            let

              Quote = each Text.Format ( "'#{0}'", { _ } ), 
              Cast = ( value, typeName ) =>
                [ Text = Text.Format ( "CAST(#{0} as #{1})", { value, typeName } ) ]
, 

              Visitor = [
                // This is to work around parameters being converted to VARCHAR
                // and to work around driver crash when using TYPE_TIME parameters.
                NUMERIC = each Cast ( _, "NUMERIC" ), 
                DECIMAL = each Cast ( _, "DECIMAL" ), 

                INTEGER = each Cast ( _, "INTEGER" ), 

                FLOAT = each Cast ( _, "FLOAT" ), 

                REAL = each Cast ( _, "REAL" ), 

                DOUBLE = each Cast ( _, "DOUBLE" ), 

                DATE = each Cast ( Quote ( Date.ToText ( _, "yyyy-MM-dd" ) ), "DATE" ), 

                TIMESTAMP = each Cast (
                  Quote ( DateTime.ToText ( _, "yyyy-MM-dd HH:mm:ss.sssssss" ) ), 
                  "TIMESTAMP"

                )
, 

                TIME = each Cast ( Quote ( Time.ToText ( _, "HH:mm:ss.sssssss" ) ), "TIME" )

              ]

            in

              ( typeInfo, ast ) =>
                Record.FieldOrDefault ( Visitor, typeInfo[TYPE_NAME], each null )( ast[Value] )

        ]
, 

        ClientConnectionPooling = true, 

        SqlCapabilities = [
          Sql92Conformance       = 8,  /* SQL_SC_SQL92_FULL */
          FractionalSecondsScale = 3, 

          MaxParameters          = 50

        ]
, 

        OnError = ( errorRecord as record ) =>
          if errorRecord[Reason] = DataSourceMissingClientLibrary then
            error Error.Record (
              DataSourceMissingClientLibrary, 
              Text.Format ( "Missing client library", { DriverDownloadUrl } ), 

              DriverDownloadUrl

            )

          else
 if errorRecord[Reason]
            =
 DataSourceError

            and
 not Table.IsEmpty (
              Table.SelectRows ( errorRecord[Detail][OdbcErrors], each [SQLState] = "57P03" )

            )

          then

            error Error.Record (
              DataSourceError, 
              Text.Format ( "warehouse suspended", { warehouse } ), 

              errorRecord[Detail]

            )

          else

            error errorRecord
, 

        // SQLGetTypeInfo can be specified in two ways:
        // 1. A #table() value that returns the same type information as an ODBC
        // call to SQLGetTypeInfo.
        // 2. A function that accepts a table argument, and returns a table. The
        // argument will contain the original results of the ODBC call to SQLGetTypeInfo.
        // Your function implementation can modify/add to this table.
        //
        // For details of the format of the types table parameter and expected return value,
        // please see: https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function
        //
        // The sample implementation provided here will simply output the original table
        // to the user trace log, without any modification.
        SQLGetTypeInfo = ( types ) =>
          if ( EnableTraceOutput <> true ) then
            types

          else

            let
              // Outputting the entire table might be too large, and result in the value being truncated.
              // We can output a row at a time instead with Table.TransformRows()
              rows = Table.TransformRows (
                types, 
                each Diagnostics.LogValue ( "SQLGetTypeInfo " & _[TYPE_NAME], _ )

              )
, 

              toTable = Table.FromRecords ( rows )

            in

              Value.ReplaceType ( toTable, Value.Type ( types ) )
, 

        // This is to work around the driver returning the deprecated
        // TIMESTAMP, DATE and TIME instead of TYPE_TIMESTAMP, TYPE_DATE and TYPE_TIME
        // for column metadata returned by SQLColumns. The column types also don't
        // match the types that are returned by SQLGetTypeInfo.
        SQLColumns = ( catalogName, schemaName, tableName, columnName, source ) =>
          let
            OdbcSqlType.DATETIME = 9, 
            OdbcSqlType.TYPE_DATE = 91, 

            OdbcSqlType.TIME = 10, 

            OdbcSqlType.TYPE_TIME = 92, 

            OdbcSqlType.TIMESTAMP = 11, 

            OdbcSqlType.TYPE_TIMESTAMP = 93, 

            FixDataType = ( dataType ) =>
              if dataType = OdbcSqlType.DATETIME then
                OdbcSqlType.TYPE_DATE

              else
 if dataType = OdbcSqlType.TIME then
                OdbcSqlType.TYPE_TIME

              else
 if dataType = OdbcSqlType.TIMESTAMP then
                OdbcSqlType.TYPE_TIMESTAMP

              else

                dataType
, 

            Transform = Table.TransformColumns ( source, { { "DATA_TYPE", FixDataType } } )

          in

            if ( EnableTraceOutput <> true ) then
              Transform

            else
 // the if statement conditions will force the values to evaluated/written to diagnostics
            if (
              Diagnostics.LogValue ( "SQLColumns.TableName", tableName )
                <>
 "***"

                and
 Diagnostics.LogValue ( "SQLColumns.ColumnName", columnName ) <> "***"

            )

            then

              let
                // Outputting the entire table might be too large, and result in the value being truncated.
                // We can output a row at a time instead with Table.TransformRows()
                rows = Table.TransformRows (
                  Transform, 
                  each Diagnostics.LogValue ( "SQLColumns", _ )

                )
, 

                toTable = Table.FromRecords ( rows )

              in

                Value.ReplaceType ( toTable, Value.Type ( Transform ) )

            else

              Transform
, 

        HierarchicalNavigation = true, 

        HideNativeQuery = true, 

        SoftNumbers = true

      ]
, 

      Databases = Odbc.DataSource ( ConnectionString, Options ), 

      Metadata = Value.Metadata ( Value.Type ( Databases ) ), 

      RemovedInformationSchema = 
        let

          RemoveInformationSchema = ( data ) =>
            Table.SelectRows ( data, each [Name] <> "INFORMATION_SCHEMA" )
, 

          TransformColumns = Table.TransformColumns (
            Databases, 
            { "Data", RemoveInformationSchema }

          )

        in

          TransformColumns
, 

      TransformComplexColumns = 
        let

          TransformComplexColumns = ( table ) =>
            let
              TableType = Value.Type ( table ), 
              Schema = Table.Schema ( table ), 

              ComplexColumns = Table.ToRecords (
                Table.SelectRows (
                  Schema, 
                  each [NativeTypeName] = "STRUCT" or [NativeTypeName] = "ARRAY"

                )

              )
, 

              TransformOperations = List.Accumulate (
                ComplexColumns, 
                table, 

                ( state, column ) =>
                  let
                    ColumnType = Type.TableColumn ( TableType, column[Name] ), 
                    PreservedFacetFields = {
                      "NativeTypeName", 
                      "NativeDefaultExpression", 

                      "NativeExpression"

                    }
, 

                    Facets = Record.SelectFields (
                      Type.Facets ( ColumnType ), 
                      PreservedFacetFields

                    )
, 

                    ComplexType = 
                      if
 column[NativeTypeName] = "STRUCT" then
                        type record

                      else

                        type list
, 

                    AddNullable = 
                      if
 Type.IsNullable ( ComplexType ) then
                        type nullable ComplexType

                      else

                        ComplexType
, 

                    TypeWithFacets = Type.ReplaceFacets ( AddNullable, Facets ), 

                    ToNullableJson = ( value ) =>
                      if value = null then null else Json.Document ( value )
, 

                    TransformColumn = Table.TransformColumns (
                      state, 
                      { column[Name], ToNullableJson, TypeWithFacets }

                    )

                  in

                    TransformColumn

              )

            in

              TransformOperations
, 

          TransformColumnKeepType = ( table, columnName, operation ) =>
            let
              TableType = Value.Type ( table ), 
              TransformColumn = Table.TransformColumns (
                table, 
                { columnName, operation, Type.TableColumn ( table, columnName ) }

              )
, 

              ReplaceType = Value.ReplaceType ( TransformColumn, TableType )

            in

              ReplaceType
, 

          TransformColumns = TransformColumnKeepType (
            RemovedInformationSchema, 
            "Data", 

            ( schemas ) =>
              TransformColumnKeepType (
                schemas, 
                "Data", 

                ( tables ) => TransformColumnKeepType ( tables, "Data", TransformComplexColumns )

              )

          )

        in

          TransformColumns
, 

      WithMetadata = Value.ReplaceType (
        TransformComplexColumns, 
        Value.ReplaceMetadata ( Value.Type ( RemovedInformationSchema ), Metadata )

      )

    in

      WithMetadata
;

  GetOption = ( options as nullable record, name as text ) =>
    if options <> null and Record.HasFields ( options, name ) then
      Record.Field ( options, name )

    else

      null
;

  GetTimeoutOption = ( options as nullable record, name as text ) =>
    let
      option = GetOption ( options, name )

    in

      if option <> null then
        if option is number and option >= 0 and NumberIsInteger ( option ) then
          option

        else

          error Error.Record (
            "Expression.Error", 
            Text.Format ( "InvalidTimeoutOptionError: #{0}" ), 

            { name }

          )

      else

        null
;

  GetHost = ( server as text ) as text =>
    let
      Address = Uri.Parts ( "http://" & server )

    in

      if Address[Host]
        =
 ""

        or
 Address[Scheme] <> "http"

        or
 Address[Path] <> "/"

        or
 Address[Query] <> []

        or
 Address[Fragment] <> ""

        or
 Address[UserName] <> ""

        or
 Address[Password] <> ""

        or
 ( Address[Port] <> 80 and Address[Port] <> 443 )

        or
 Text.EndsWith ( server, ":80" )

      then

        error "Invalid server name"

      else

        Address[Host]
;

  AddConnectionStringOption = ( options as record, name as text, value as any ) as record =>
    if value = null then options else Record.AddField ( options, name, value )
;

  NumberIsInteger = ( x as number ) => Number.RoundDown ( x ) = x;

  DriverDownloadUrl = "http://go.microsoft.com/fwlink/?LinkID=823762";

  ValidOptions = { "ConnectionTimeout", "CommandTimeout" };

  DataSourceMissingClientLibrary = "DataSource.MissingClientLibrary";

  DataSourceError = "DataSource.Error";

  SnowflakeODBC = [ Authentication = [ UsernamePassword = [] ] ];

  SnowflakeODBC.UI = [
    ButtonText          = { "SnowflakeODBC Sample", "SnowflakeODBC Sample" }, 
    Category            = "Database", 

    SupportsDirectQuery = true

  ]
;

  //
  // Load common library functions
  //
  Extension.LoadFunction = ( name as text ) =>
    let
      binary = Extension.Contents ( name ), 
      asText = Text.FromBinary ( binary )

    in

      Expression.Evaluate ( asText, #shared )
;

  // Diagnostics module contains multiple functions. We can take the ones we need.
  Diagnostics = Extension.LoadFunction ( "Diagnostics.pqm" )
;

  Diagnostics.LogValue = 
    if
 ( EnableTraceOutput ) then
      Diagnostics[LogValue]

    else

      ( prefix, value ) => value
;

  // OdbcConstants contains numeric constants from the ODBC header files, and a
  // helper function to create bitfield values.
  ODBC = Extension.LoadFunction ( "OdbcConstants.pqm" )
;

  Odbc.Flags = ODBC[Flags];

SnowflakeODBC.query.pq

// Scratch query used during development to exercise the connector.
// Replace "server" / "warehouse" with a reachable Snowflake instance.
let
  databases = SnowflakeODBC.Databases ( "server", "warehouse" )
in
  databases

SqlODBC.pq

// This connector provides a sample Direct Query enabled connector
// based on an ODBC driver. It is meant as a template for other
// ODBC based connectors that require similar functionality.
//
section SqlODBC;
  // When set to true, additional trace information will be written out to the User log.
  // This should be set to false before release. Tracing is done through a call to
  // Diagnostics.LogValue(). When EnableTraceOutput is set to false, the call becomes a
  // no-op and simply returns the original value.
  // (The wrapper that implements this switch is the Diagnostics.LogValue
  // binding defined near the end of this section.)
  EnableTraceOutput = true
;

  // TODO
  // add and handle common options record properties
  // add handling for LIMIT/OFFSET vs. TOP
  // add handling for SSL
  /****************************
* ODBC Driver Configuration
****************************/
  // The name of your ODBC driver.
  //
  Config_DriverName = "SQL Server Native Client 11.0"
;

  // If your driver under-reports its SQL conformance level because it does not
  // support the full range of CRUD operations, but does support the ANSI SQL required
  // to support the SELECT operations performed by Power Query, you can override
  // this value to report a higher conformance level. Please use one of the numeric
  // values below (i.e. 8 for SQL_SC_SQL92_FULL).
  //
  // SQL_SC =
  // [
  // SQL_SC_SQL92_ENTRY = 1,
  // SQL_SC_FIPS127_2_TRANSITIONAL = 2,
  // SQL_SC_SQL92_INTERMEDIATE = 4,
  // SQL_SC_SQL92_FULL = 8
  // ]
  //
  // Set to null to determine the value from the driver.
  //
  Config_SqlConformance = ODBC[SQL_SC][SQL_SC_SQL92_FULL]
;

  // Valid values for Config_SqlConformance (defined above): null, 1, 2, 4, 8.
  // This setting controls row count limits and offsets. If not set correctly, query
  // folding capabilities for this connector will be extremely limited. You can use
  // the LimitClauseKind constants to match common LIMIT/OFFSET SQL formats. If none
  // of the supported formats match your desired SQL syntax, consider filing a feature
  // request to support your variation.
  //
  // Supporting OFFSET is considerably less important than supporting LIMIT.
  //
  // LimitClauseKind values and formats:
  //
  // LimitClauseKind.Top (LIMIT only, OFFSET not supported)
  // -------------------
  // SELECT TOP 100 *
  // FROM table
  //
  // LimitClauseKind.Limit (LIMIT only, OFFSET not supported)
  // ---------------------
  // SELECT *
  // FROM table
  // LIMIT 100
  //
  // LimitClauseKind.LimitOffset
  // ---------------------------
  // SELECT *
  // FROM table
  // LIMIT 100 OFFSET 200
  //
  // This option requires that the SQL dialect support all three variations:
  // "LIMIT x", "LIMIT x OFFSET y" and "OFFSET y". If your SQL dialect only supports
  // OFFSET when LIMIT is also specified, use LimitClauseKind.Limit instead.
  //
  // LimitClauseKind.AnsiSql2008
  // ---------------------------
  // SELECT *
  // FROM table
  // OFFSET 200 ROWS
  // FETCH FIRST 100 ROWS ONLY
  //
  Config_LimitClauseKind = LimitClauseKind.Top
;

  // Valid values for Config_LimitClauseKind (defined above): see the LimitClauseKind list above.
  // Set this option to true if your ODBC supports the standard username/password
  // handling through the UID and PWD connection string parameters. If the user
  // selects UsernamePassword auth, the supplied values will be automatically
  // added to the CredentialConnectionString.
  //
  // If you wish to set these values yourself, or your driver requires additional
  // parameters to be set, please set this option to 'false'
  //
  Config_DefaultUsernamePasswordHandling = true
;

  // Valid values for Config_DefaultUsernamePasswordHandling (defined above): true, false.
  // Some drivers have problems will parameter bindings and certain data types.
  // If the driver supports parameter bindings, then set this to true.
  // When set to false, parameter values will be inlined as literals into the generated SQL.
  // To enable inlining for a limited number of data types, set this value
  // to null and set individual flags through the SqlCapabilities record.
  //
  // Set to null to determine the value from the driver.
  //
  Config_UseParameterBindings = false
;

  // Valid values for Config_UseParameterBindings (defined above): true, false, null.
  // Override this setting to force the character escape value.
  // This is typically done when you have set UseParameterBindings to false.
  //
  // Set to null to determine the value from the driver.
  //
  Config_StringLiterateEscapeCharacters = { "\" }
;

  // Example value for Config_StringLiterateEscapeCharacters (defined above): { "\" }.
  // Override this if the driver expects the use of CAST instead of CONVERT.
  // By default, the query will be generated using ANSI SQL CONVERT syntax.
  //
  // Set to null to leave default behavior.
  //
  Config_UseCastInsteadOfConvert = null
;

  // Valid values for Config_UseCastInsteadOfConvert (defined above): true, false, null.
  // Set this to true to enable Direct Query in addition to Import mode.
  //
  Config_EnableDirectQuery = true
;

  // Valid values for Config_EnableDirectQuery (defined above): true, false.
  [ DataSource.Kind = "SqlODBC", Publish = "SqlODBC.Publish" ]
  shared
 SqlODBC.Contents = ( server as text ) =>
    let
      //
      // Connection string settings
      //
      ConnectionString = [
        Driver = Config_DriverName, 
        // set all connection string properties
        Server = server, 
        ApplicationIntent = "readonly"

      ]
, 

      //
      // Handle credentials
      // Credentials are not persisted with the query and are set through a separate
      // record field - CredentialConnectionString. The base Odbc.DataSource function
      // will handle UsernamePassword authentication automatically, but it is explictly
      // handled here as an example.
      //
      Credential = Extension.CurrentCredential(), 
      CredentialConnectionString = 
        if
 Credential[AuthenticationKind]? = "UsernamePassword" then
          // set connection string parameters used for basic authentication
          [ UID = Credential[Username], PWD = Credential[Password] ]
        else
 if ( Credential[AuthenticationKind]? = "Windows" ) then
          // set connection string parameters used for windows/kerberos authentication
          [ Trusted_Connection = "Yes" ]
        else

          error Error.Record (
            "Error", 
            "Unhandled authentication kind: " & Credential[AuthenticationKind]?

          )
, 

      //
      // Configuration options for the call to Odbc.DataSource
      //
      defaultConfig = Diagnostics.LogValue ( "BuildOdbcConfig", BuildOdbcConfig() ), 
      SqlCapabilities = Diagnostics.LogValue (
        "SqlCapabilities_Options", 
        defaultConfig[SqlCapabilities]
          &
 [
            // place custom overrides here
            FractionalSecondsScale = 3

          ]

      )
, 

      // Please refer to the ODBC specification for SQLGetInfo properties and values.
      // https://github.com/Microsoft/ODBC-Specification/blob/master/Windows/inc/sqlext.h
      SQLGetInfo = Diagnostics.LogValue (
        "SQLGetInfo_Options", 
        defaultConfig[SQLGetInfo]
          &
 [
            // place custom overrides here
            SQL_SQL92_PREDICATES = ODBC[SQL_SP][All], 
            SQL_AGGREGATE_FUNCTIONS = ODBC[SQL_AF][All]

          ]

      )
, 

      // SQLGetTypeInfo can be specified in two ways:
      // 1. A #table() value that returns the same type information as an ODBC
      // call to SQLGetTypeInfo.
      // 2. A function that accepts a table argument, and returns a table. The
      // argument will contain the original results of the ODBC call to SQLGetTypeInfo.
      // Your function implementation can modify/add to this table.
      //
      // For details of the format of the types table parameter and expected return value,
      // please see: https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function
      //
      // The sample implementation provided here will simply output the original table
      // to the user trace log, without any modification.
      SQLGetTypeInfo = ( types ) =>
        if ( EnableTraceOutput <> true ) then
          types

        else

          let
            // Outputting the entire table might be too large, and result in the value being truncated.
            // We can output a row at a time instead with Table.TransformRows()
            rows = Table.TransformRows (
              types, 
              each Diagnostics.LogValue ( "SQLGetTypeInfo " & _[TYPE_NAME], _ )

            )
, 

            toTable = Table.FromRecords ( rows )

          in

            Value.ReplaceType ( toTable, Value.Type ( types ) )
, 

      // SQLColumns is a function handler that receives the results of an ODBC call
      // to SQLColumns(). The source parameter contains a table with the data type
      // information. This override is typically used to fix up data type mismatches
      // between calls to SQLGetTypeInfo and SQLColumns.
      //
      // For details of the format of the source table parameter, please see:
      // https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlcolumns-function
      //
      // The sample implementation provided here will simply output the original table
      // to the user trace log, without any modification.
      SQLColumns = ( catalogName, schemaName, tableName, columnName, source ) =>
        if ( EnableTraceOutput <> true ) then
          source

        else
 // the if statement conditions will force the values to evaluated/written to diagnostics
        if (
          Diagnostics.LogValue ( "SQLColumns.TableName", tableName )
            <>
 "***"

            and
 Diagnostics.LogValue ( "SQLColumns.ColumnName", columnName ) <> "***"

        )

        then

          let
            // Outputting the entire table might be too large, and result in the value being truncated.
            // We can output a row at a time instead with Table.TransformRows()
            rows = Table.TransformRows ( source, each Diagnostics.LogValue ( "SQLColumns", _ ) ), 
            toTable = Table.FromRecords ( rows )

          in

            Value.ReplaceType ( toTable, Value.Type ( source ) )

        else

          source
, 

      OdbcDatasource = Odbc.DataSource (
        ConnectionString, 
        [
          // A logical (true/false) that sets whether to view the tables grouped by their schema names
          HierarchicalNavigation = true, 
          // Prevents execution of native SQL statements. Extensions should set this to true.
          HideNativeQuery = true, 
          // Allows upconversion of numeric types
          SoftNumbers = true, 
          // Allow upconversion / resizing of numeric and string types
          TolerateConcatOverflow = true, 
          // Enables connection pooling via the system ODBC manager
          ClientConnectionPooling = true, 
          // These values should be set by previous steps
          CredentialConnectionString = CredentialConnectionString, 
          SqlCapabilities = SqlCapabilities, 

          SQLColumns = SQLColumns, 

          SQLGetInfo = SQLGetInfo, 

          SQLGetTypeInfo = SQLGetTypeInfo

        ]

      )

    in

      OdbcDatasource
;

  // Data Source Kind description
  SqlODBC = [
    // Set the TestConnection handler to enable gateway support.
    // The TestConnection handler will invoke your data source function to
    // validate the credentials the user has provider. Ideally, this is not
    // an expensive operation to perform. By default, the dataSourcePath value
    // will be a json string containing the required parameters of your data
    // source function. These should be parsed and parsed as individual parameters
    // to the specified data source function.
    TestConnection = ( dataSourcePath ) =>
      let
        json   = Json.Document ( dataSourcePath ), 
        server = json[server] // name of function parameter

      in

        { "SqlODBC.Contents", server }
, 

    // Set supported types of authentication
    Authentication = [ Windows = [], UsernamePassword = [] ], 
    Label = Extension.LoadString ( "DataSourceLabel" )

  ]
;

  // Data Source UI publishing description
  SqlODBC.Publish = [
    Beta = true, 
    Category = "Other", 

    ButtonText = { Extension.LoadString ( "ButtonTitle" ), Extension.LoadString ( "ButtonHelp" ) }, 

    LearnMoreUrl = "https://powerbi.microsoft.com/", 

    SupportsDirectQuery = Config_EnableDirectQuery, 

    SourceImage = SqlODBC.Icons, 

    SourceTypeImage = SqlODBC.Icons

  ]
;

  // Connector icons. Each list carries the same artwork at increasing
  // resolutions for different DPI scales (16/20/24/32 px and 32/40/48/64 px).
  SqlODBC.Icons = [
    Icon16 = {
      Extension.Contents ( "SqlODBC16.png" ), 
      Extension.Contents ( "SqlODBC20.png" ), 

      Extension.Contents ( "SqlODBC24.png" ), 

      Extension.Contents ( "SqlODBC32.png" )

    }
, 

    Icon32 = {
      Extension.Contents ( "SqlODBC32.png" ), 
      Extension.Contents ( "SqlODBC40.png" ), 

      Extension.Contents ( "SqlODBC48.png" ), 

      Extension.Contents ( "SqlODBC64.png" )

    }

  ]
;

  // build settings based on configuration variables
  BuildOdbcConfig = () as record =>
    let
      Merge = (
        previous as record, 
        optional caps as record, 

        optional funcs as record, 

        optional getInfo as record

      )
 as record =>
        let
          newCaps = 
            if
 ( caps <> null ) then
              previous[SqlCapabilities] & caps

            else

              previous[SqlCapabilities]
, 

          newFuncs = 
            if
 ( funcs <> null ) then
              previous[SQLGetFunctions] & funcs

            else

              previous[SQLGetFunctions]
, 

          newGetInfo = 
            if
 ( getInfo <> null ) then
              previous[SQLGetInfo] & getInfo

            else

              previous[SQLGetInfo]

        in

          [ SqlCapabilities = newCaps, SQLGetFunctions = newFuncs, SQLGetInfo = newGetInfo ]
, 

      defaultConfig = [ SqlCapabilities = [], SQLGetFunctions = [], SQLGetInfo = [] ], 

      withParams = 
        if
 ( Config_UseParameterBindings = false ) then
          let
            caps = [
              SupportsNumericLiterals       = true, 
              SupportsStringLiterals        = true, 

              SupportsOdbcDateLiterals      = true, 

              SupportsOdbcTimeLiterals      = true, 

              SupportsOdbcTimestampLiterals = true

            ]
, 

            funcs = [ SQL_API_SQLBINDPARAMETER = false ]

          in

            Merge ( defaultConfig, caps, funcs )

        else

          defaultConfig
, 

      withEscape = 
        if
 ( Config_StringLiterateEscapeCharacters <> null ) then
          let
            caps = [ StringLiteralEscapeCharacters = Config_StringLiterateEscapeCharacters ]

          in

            Merge ( withParams, caps )

        else

          withParams
, 

      withLimitClauseKind = 
        let

          caps = [ LimitClauseKind = Config_LimitClauseKind ]

        in

          Merge ( withEscape, caps )
, 

      withCastOrConvert = 
        if
 ( Config_UseCastInsteadOfConvert <> null ) then
          let
            value = 
              if
 ( Config_UseCastInsteadOfConvert = true ) then
                ODBC[SQL_FN_CVT][SQL_FN_CVT_CAST]

              else

                ODBC[SQL_FN_CVT][SQL_FN_CVT_CONVERT]
, 

            getInfo = [ SQL_CONVERT_FUNCTIONS = value ]

          in

            Merge ( withLimitClauseKind, null, null, getInfo )

        else

          withLimitClauseKind
, 

      withSqlConformance = 
        if
 ( Config_SqlConformance <> null ) then
          let
            getInfo = [ SQL_SQL_CONFORMANCE = Config_SqlConformance ]

          in

            Merge ( withCastOrConvert, null, null, getInfo )

        else

          withCastOrConvert

    in

      withSqlConformance
;

  //
  // Load common library functions
  //
  // NOTE(review): Expression.Evaluate with #shared gives the evaluated module
  // access to every shared and library function; do not feed it untrusted text.
  Extension.LoadFunction = ( name as text ) =>
    let
      binary = Extension.Contents ( name ), 
      asText = Text.FromBinary ( binary )

    in

      Expression.Evaluate ( asText, #shared )
;

  // Diagnostics module contains multiple functions. We can take the ones we need.
  Diagnostics = Extension.LoadFunction ( "Diagnostics.pqm" )
;

  // When tracing is disabled, LogValue degrades to a pass-through of the value.
  Diagnostics.LogValue = 
    if
 ( EnableTraceOutput ) then
      Diagnostics[LogValue]

    else

      ( prefix, value ) => value
;

  // OdbcConstants contains numeric constants from the ODBC header files, and a
  // helper function to create bitfield values.
  ODBC = Extension.LoadFunction ( "OdbcConstants.pqm" )
;

SqlODBC.query.pq

// Scratch query used during development: drill into master/sys/all_views
// on a local SQL Server and show the first five rows.
let
  navigation = SqlODBC.Contents ( "localhost" ),
  masterDb   = navigation{[ Name = "master" ]}[Data],
  sysSchema  = masterDb{[ Name = "sys" ]}[Data],
  allViews   = sysSchema{[ Name = "all_views" ]}[Data]
in
  Table.FirstN ( allViews, 5 )

TripPin.pq

section TripPin;
  // Data Source Kind description
  TripPin = [
    // TestConnection is required to enable the connector through the Gateway.
    // TripPin.Contents takes no arguments, so the dataSourcePath payload is
    // ignored and only the function name is returned.
    TestConnection = ( dataSourcePath ) => { "TripPin.Contents" }, 
    // The public TripPin service needs no credentials.
    Authentication = [ Anonymous = [] ], 

    Label = "TripPin Part 9 - TestConnection"

  ]
;

  // Data Source UI publishing description
  TripPin.Publish = [
    Beta       = true, 
    Category   = "Other", 

    ButtonText = { "TripPin TestConnection", "TripPin TestConnection" }

  ]
;

  //
  // Implementation
  //
  // Headers sent with every request to the TripPin OData service.
  DefaultRequestHeaders = [
    #"Accept"           = "application/json;odata.metadata=minimal",  // column name and values only
    #"OData-MaxVersion" = "4.0" // we only support v4

  ]
;

  // Root of the public TripPin OData v4 sample service.
  BaseUrl = "http://services.odata.org/v4/TripPinService/";

  // Define our top level table types
  AirlinesType = type table [ AirlineCode = text, Name = text ]
;

  AirportsType = type table [ Name = text, IataCode = text, Location = LocationType ];

  // Schema of a People row: Emails is a list of text, AddressInfo a list of
  // (nullable) LocationType records.
  PeopleType = type table [
    UserName = text, 
    FirstName = text, 

    LastName = text, 

    Emails = {text}, 

    AddressInfo = { nullable LocationType }, 

    Gender = nullable text, 

    Concurrency = Int64.Type

  ]
;

  // remaining structured types
  LocationType = type [ Address = text, City = CityType, Loc = LocType ]
;

  CityType = type [ CountryRegion = text, Name = text, Region = text ];

  // GeoJSON-style point value: type / coordinates / crs.
  LocType = type [ #"type" = text, coordinates = {number}, crs = CrsType ];

  CrsType = type [ #"type" = text, properties = record ];

  // Maps entity names to their declared table types; drives both the
  // navigation table and schema enforcement in GetEntity.
  SchemaTable = #table (
    { "Entity", "Type" }, 
    { { "Airlines", AirlinesType }, { "Airports", AirportsType }, { "People", PeopleType } }

  )
;

  GetSchemaForEntity = ( entity as text ) as type =>
    try
      SchemaTable{[ Entity = entity ]}[Type]

    otherwise
      
        let

          message = Text.Format ( "Couldn't find entity: '#{0}'", { entity } )

        in

          Diagnostics.Trace ( TraceLevel.Error, message, () => error message, true )
;

  [ DataSource.Kind = "TripPin", Publish = "TripPin.Publish" ]
  shared
 TripPin.Contents = () => TripPinNavTable ( BaseUrl ) as table;

  TripPinNavTable = ( url as text ) as table =>
    let
      // Use our schema table as the source of top level items in the navigation tree
      entities = Table.SelectColumns ( SchemaTable, { "Entity" } ), 
      rename = Table.RenameColumns ( entities, { { "Entity", "Name" } } ), 

      // Add Data as a calculated column
      //withData = Table.AddColumn(rename, "Data", each Diagnostics.LogFailure("Error in GetEntity", () => GetEntity(url, "DoesNotExist")), type table),
      //withData = Table.AddColumn(rename, "Data", each GetEntity(url, "DoesNotExist"), type table),
      withData = Table.AddColumn ( rename, "Data", each GetEntity ( url, [Name] ), type table ), 
      // Add ItemKind and ItemName as fixed text values
      withItemKind = Table.AddColumn ( withData, "ItemKind", each "Table", type text ), 
      withItemName = Table.AddColumn ( withItemKind, "ItemName", each "Table", type text ), 

      // Indicate that the node should not be expandable
      withIsLeaf = Table.AddColumn ( withItemName, "IsLeaf", each true, type logical ), 
      // Generate the nav table
      navTable = Table.ToNavigationTable (
        withIsLeaf, 
        { "Name" }, 

        "Name", 

        "Data", 

        "ItemKind", 

        "ItemName", 

        "IsLeaf"

      )

    in

      navTable
;

  TripPin.Feed = ( url as text, optional schema as type ) as table =>
    let
      _url = Diagnostics.LogValue ( "Accessing url", url ), 
      _schema = Diagnostics.LogValue ( "Schema type", schema ), 

      //result = GetAllPagesByNextLink(url, schema)
      result = GetAllPagesByNextLink ( _url, _schema )

    in

      result
;

  GetEntity = ( url as text, entity as text ) as table =>
    let
      fullUrl       = Uri.Combine ( url, entity ), 
      schema        = GetSchemaForEntity ( entity ), 

      result        = TripPin.Feed ( fullUrl, schema ), 

      appliedSchema = Table.ChangeType ( result, schema )

    in

      appliedSchema
;

  GetPage = ( url as text, optional schema as type ) as table =>
    let
      response = Web.Contents ( url, [ Headers = DefaultRequestHeaders ] ), 
      body = Json.Document ( response ), 

      nextLink = GetNextLink ( body ), 

      // If we have no schema, use Table.FromRecords() instead
      // (and hope that our results all have the same fields).
      // If we have a schema, expand the record using its field names
      data = 
        if
 ( schema = null ) then
          Diagnostics.LogFailure (
            "Error converting response body. Are the records uniform?", 
            () => Table.FromRecords ( body[value] )

          )

        else

          let
            // convert the list of records into a table (single column of records)
            asTable = Table.FromList ( body[value], Splitter.SplitByNothing(), { "Column1" } ), 
            fields = Record.FieldNames ( Type.RecordFields ( Type.TableRow ( schema ) ) ), 

            expanded = Table.ExpandRecordColumn ( asTable, "Column1", fields )

          in

            expanded

    in

      data meta [ NextLink = nextLink ]
;

  // Read all pages of data.
  // After every page, we check the "NextLink" record on the metadata of the previous request.
  // Table.GenerateByPage will keep asking for more pages until we return null.
  GetAllPagesByNextLink = ( url as text, optional schema as type ) as table =>
    Table.GenerateByPage (
      ( previous ) =>
        let
          // if previous is null, then this is our first page of data
          nextLink = if ( previous = null ) then url else Value.Metadata ( previous )[NextLink]?, 
          // if NextLink was set to null by the previous call, we know we have no more data
          page = if ( nextLink <> null ) then GetPage ( nextLink, schema ) else null

        in

          page

    )
;

  // In this implementation, 'response' will be the parsed body of the response after the call to Json.Document.
  // We look for the '@odata.nextLink' field and simply return null if it doesn't exist.
  GetNextLink = ( response ) as nullable text =>
    Record.FieldOrDefault ( response, "@odata.nextLink" )
;

  //
  // Load common library functions
  //
  // TEMPORARY WORKAROUND until we're able to reference other M modules
  // NOTE(review): Expression.Evaluate with #shared gives the evaluated module
  // access to every shared and library function; do not feed it untrusted text.
  Extension.LoadFunction = ( name as text ) =>
    let
      binary = Extension.Contents ( name ), 
      asText = Text.FromBinary ( binary )

    in

      Expression.Evaluate ( asText, #shared )
;

  // Shared helpers loaded from standalone .pqm modules.
  Table.ChangeType = Extension.LoadFunction ( "Table.ChangeType.pqm" );

  Table.GenerateByPage = Extension.LoadFunction ( "Table.GenerateByPage.pqm" );

  Table.ToNavigationTable = Extension.LoadFunction ( "Table.ToNavigationTable.pqm" );

  // Diagnostics module contains multiple functions. We can take the ones we need.
  Diagnostics = Extension.LoadFunction ( "Diagnostics.pqm" )
;

  // NOTE(review): unlike the SqlODBC section, these are not gated behind a
  // trace flag - logging is always active here.
  Diagnostics.LogValue = Diagnostics[LogValue];

  Diagnostics.LogFailure = Diagnostics[LogFailure];

TripPin.query.pq

section TripPinUnitTests;
  // Runs the TripPin smoke tests and exposes the summarized report table.
  shared TripPin.UnitTest = [
    // Common fixtures - placed here so they are only evaluated once.
    RootTable = TripPin.Contents(),
    Airlines  = RootTable{[ Name = "Airlines" ]}[Data],
    Airports  = RootTable{[ Name = "Airports" ]}[Data],
    People    = RootTable{[ Name = "People" ]}[Data],

    // Fact(<Name of the Test>, <Expected Value>, <Actual Value>)
    // <Expected Value> and <Actual Value> can be a literal or let statement
    facts = {
      Fact ( "Check that we have three entries in our nav table", 3, Table.RowCount ( RootTable ) ),
      Fact ( "We have Airline data?", true, not Table.IsEmpty ( Airlines ) ),
      Fact ( "We have People data?", true, not Table.IsEmpty ( People ) ),
      Fact ( "We have Airport data?", true, not Table.IsEmpty ( Airports ) ),
      Fact ( "Airlines only has 2 columns", 2, List.Count ( Table.ColumnNames ( Airlines ) ) ),
      Fact (
        "Airline table has the right fields",
        { "AirlineCode", "Name" },
        Record.FieldNames ( Type.RecordFields ( Type.TableRow ( Value.Type ( Airlines ) ) ) )
      ),
      Fact (
        "Emails is properly typed",
        type text,
        Type.ListItem ( Value.Type ( People{0}[Emails] ) )
      )
    },

    report = Facts.Summarize ( facts )
  ][report]
;

  /// COMMON UNIT TESTING CODE
  Fact = ( _subject as text, _expected, _actual ) as record =>
    [
      expected = try _expected, 
      safeExpected = 
        if
 expected[HasError] then
          "Expected : " & @ValueToText ( expected[Error] )

        else

          expected[Value]
, 

      actual = try _actual, 

      safeActual = 
        if
 actual[HasError] then
          "Actual : " & @ValueToText ( actual[Error] )

        else

          actual[Value]
, 

      attempt = try safeExpected = safeActual, 

      result = if attempt[HasError] or not attempt[Value] then "Failure ⛔" else "Success ✓", 

      resultOp = if result = "Success ✓" then " = " else " <> ", 

      addendumEvalAttempt = if attempt[HasError] then @ValueToText ( attempt[Error] ) else "", 

      addendumEvalExpected = try @ValueToText ( safeExpected ) otherwise "...", 

      addendumEvalActual = try @ValueToText ( safeActual ) otherwise "...", 

      fact = [
        Result  = result & " " & addendumEvalAttempt, 
        Notes   = _subject, 

        Details = " (" & addendumEvalExpected & resultOp & addendumEvalActual & ")"

      ]

    ]
[fact]
;

  Facts = ( _subject as text, _predicates as list ) =>
    List.Transform ( _predicates, each Fact ( _subject, _{0}, _{1} ) )
;

  Facts.Summarize = ( _facts as list ) as table =>
    [
      Fact.CountSuccesses = ( count, i ) =>
        [
          result = try i[Result], 
          sum = 
            if
 result[HasError] or not Text.StartsWith ( result[Value], "Success" ) then
              count

            else

              count + 1

        ]
[sum]
, 

      passed = List.Accumulate ( _facts, 0, Fact.CountSuccesses ), 

      total = List.Count ( _facts ), 

      format = if passed = total then "All #{0} Passed !!! ✓" else "#{0} Passed ☺ #{1} Failed ☹", 

      result = if passed = total then "Success" else "⛔", 

      rate = Number.IntegerDivide ( 100 * passed, total ), 

      header = [
        Result  = result, 
        Notes   = Text.Format ( format, { passed, total - passed } ), 

        Details = Text.Format ( "#{0}% success rate", { rate } )

      ]
, 

      report = Table.FromRecords ( List.Combine ( { { header }, _facts } ) )

    ]
[report]
;

  ValueToText = ( value, optional depth ) =>
    let
      List.TransformAndCombine = ( list, transform, separator ) =>
        Text.Combine ( List.Transform ( list, transform ), separator )
, 

      Serialize.Binary = ( x ) => "#binary(" & Serialize ( Binary.ToList ( x ) ) & ") ", 

      Serialize.Function = ( x ) =>
        _serialize_function_param_type (
          Type.FunctionParameters ( Value.Type ( x ) ), 
          Type.FunctionRequiredParameters ( Value.Type ( x ) )

        )

          &
 " as "

          &
 _serialize_function_return_type ( Value.Type ( x ) )

          &
 " => (...) "
, 

      Serialize.List = ( x ) => "{" & List.TransformAndCombine ( x, Serialize, ", " ) & "} ", 

      Serialize.Record = ( x ) =>
        "[ "
          &
 List.TransformAndCombine (
            Record.FieldNames ( x ), 
            ( item ) =>
              Serialize.Identifier ( item ) & " = " & Serialize ( Record.Field ( x, item ) )
, 

            ", "

          )

          &
 " ] "
, 

      Serialize.Table = ( x ) =>
        "#table( type "
          &
 _serialize_table_type ( Value.Type ( x ) )

          &
 ", "

          &
 Serialize ( Table.ToRows ( x ) )

          &
 ") "
, 

      Serialize.Identifier = Expression.Identifier, 

      Serialize.Type = ( x ) => "type " & _serialize_typename ( x ), 

      _serialize_typename = ( x, optional funtype as logical ) => /* Optional parameter: Is this being used as part of a function signature? */
        let
          isFunctionType = ( x as type ) =>
            try if Type.FunctionReturn ( x ) is type then true else false otherwise false
, 

          isTableType = ( x as type ) =>
            try if Type.TableSchema ( x ) is table then true else false otherwise false
, 

          isRecordType = ( x as type ) =>
            try if Type.ClosedRecord ( x ) is type then true else false otherwise false
, 

          isListType = ( x as type ) =>
            try if Type.ListItem ( x ) is type then true else false otherwise false

        in

          if funtype = null and isTableType ( x ) then
            _serialize_table_type ( x )

          else
 if funtype = null and isListType ( x ) then
            "{ " & @_serialize_typename ( Type.ListItem ( x ) ) & " }"

          else
 if funtype = null and isFunctionType ( x ) then
            "function " & _serialize_function_type ( x )

          else
 if funtype = null and isRecordType ( x ) then
            _serialize_record_type ( x )

          else
 if x = type any then
            "any"

          else

            let
              base = Type.NonNullable ( x )

            in

              ( if Type.IsNullable ( x ) then "nullable " else "" )
                &
 (
                  if base = type anynonnull then
                    "anynonnull"

                  else
 if base = type binary then
                    "binary"

                  else
 if base = type date then
                    "date"

                  else
 if base = type datetime then
                    "datetime"

                  else
 if base = type datetimezone then
                    "datetimezone"

                  else
 if base = type duration then
                    "duration"

                  else
 if base = type logical then
                    "logical"

                  else
 if base = type none then
                    "none"

                  else
 if base = type null then
                    "null"

                  else
 if base = type number then
                    "number"

                  else
 if base = type text then
                    "text"

                  else
 if base = type time then
                    "time"

                  else
 if base = type type then
                    "type"

                  else
 /* Abstract types: */
                  if base = type function then
                    "function"

                  else
 if base = type table then
                    "table"

                  else
 if base = type record then
                    "record"

                  else
 if base = type list then
                    "list"

                  else

                    "any /*Actually unknown type*/"

                )
, 

      _serialize_table_type = ( x ) =>
        let
          schema = Type.TableSchema ( x )

        in

          "table "
            &
 (
              if Table.IsEmpty ( schema ) then
                ""

              else

                "["
                  &
 List.TransformAndCombine (
                    Table.ToRecords ( Table.Sort ( schema, "Position" ) ), 
                    each Serialize.Identifier ( _[Name] ) & " = " & _[Kind], 

                    ", "

                  )

                  &
 "] "

            )
, 

      _serialize_record_type = ( x ) =>
        let
          flds = Type.RecordFields ( x )

        in

          if Record.FieldCount ( flds ) = 0 then
            "record"

          else

            "["
              &
 List.TransformAndCombine (
                Record.FieldNames ( flds ), 
                ( item ) =>
                  Serialize.Identifier ( item )
                    &
 "="

                    &
 _serialize_typename ( Record.Field ( flds, item )[Type] )
, 

                ", "

              )

              &
 ( if Type.IsOpenRecord ( x ) then ", ..." else "" )

              &
 "]"
, 

      _serialize_function_type = ( x ) =>
        _serialize_function_param_type (
          Type.FunctionParameters ( x ), 
          Type.FunctionRequiredParameters ( x )

        )

          &
 " as "

          &
 _serialize_function_return_type ( x )
, 

      _serialize_function_param_type = ( t, n ) =>
        let
          funsig = Table.ToRecords (
            Table.TransformColumns (
              Table.AddIndexColumn ( Record.ToTable ( t ), "isOptional", 1 ), 
              { "isOptional", ( x ) => x > n }

            )

          )

        in

          "("
            &
 List.TransformAndCombine (
              funsig, 
              ( item ) =>  ( if item[isOptional] then "optional " else "" )
                &
 Serialize.Identifier ( item[Name] )

                &
 " as "

                &
 _serialize_typename ( item[Value], true )
, 

              ", "

            )

            &
 ")"
, 

      _serialize_function_return_type = ( x ) =>
        _serialize_typename ( Type.FunctionReturn ( x ), true )
, 

      Serialize = ( x ) as text =>
        if x is binary then
          try Serialize.Binary ( x ) otherwise "null /*serialize failed*/"

        else
 if x is date then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is datetime then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is datetimezone then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is duration then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is function then
          try Serialize.Function ( x ) otherwise "null /*serialize failed*/"

        else
 if x is list then
          try Serialize.List ( x ) otherwise "null /*serialize failed*/"

        else
 if x is logical then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is null then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is number then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is record then
          try Serialize.Record ( x ) otherwise "null /*serialize failed*/"

        else
 if x is table then
          try Serialize.Table ( x ) otherwise "null /*serialize failed*/"

        else
 if x is text then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is time then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is type then
          try Serialize.Type ( x ) otherwise "null /*serialize failed*/"

        else

          "[#_unable_to_serialize_#]"

    in

      try Serialize ( value ) otherwise "<serialization failed>"
;

UnitTesting.pq

section UnitTesting;
  // Fixture functions exercised by the unit-test section below.
  shared UnitTesting.ReturnsABC = () => "ABC";

  shared UnitTesting.Returns123 = () => "123";

  // A one-column table ("a") whose single row is repeated five times.
  shared UnitTesting.ReturnsTableWithFiveRows = () =>
    Table.Repeat ( #table ( { "a" }, { { 1 } } ), 5 );

UnitTesting.query.pq

section UnitTestingUnitTests;
  shared MyExtension.UnitTest = [
    // Put any common variables here if you only want them to be evaluated once
    // Fact(<Name of the Test>, <Expected Value>, <Actual Value>)
    // <Expected Value> and <Actual Value> can be a literal or let statement
    facts = {
      Fact ( "Check that this function returns 'ABC'",  // name of the test
      "ABC",  // expected value

      UnitTesting.ReturnsABC() // expression to evaluate (let or single statement)

       )
, 

      Fact ( "Check that this function returns '123'", "124", UnitTesting.Returns123() ), 

      Fact (
        "Result should contain 5 rows", 
        5, 

        Table.RowCount ( UnitTesting.ReturnsTableWithFiveRows() )

      )
, 

      Fact (
        "Values should be equal (using a let statement)", 
        "Hello World", 

        let
          a = "Hello World"

        in

          a

      )

    }
, 

    report = Facts.Summarize ( facts )

  ]
[report]
;

  /// COMMON UNIT TESTING CODE
  Fact = ( _subject as text, _expected, _actual ) as record =>
    [
      expected = try _expected, 
      safeExpected = 
        if
 expected[HasError] then
          "Expected : " & @ValueToText ( expected[Error] )

        else

          expected[Value]
, 

      actual = try _actual, 

      safeActual = 
        if
 actual[HasError] then
          "Actual : " & @ValueToText ( actual[Error] )

        else

          actual[Value]
, 

      attempt = try safeExpected = safeActual, 

      result = if attempt[HasError] or not attempt[Value] then "Failure ⛔" else "Success ✓", 

      resultOp = if result = "Success ✓" then " = " else " <> ", 

      addendumEvalAttempt = if attempt[HasError] then @ValueToText ( attempt[Error] ) else "", 

      addendumEvalExpected = try @ValueToText ( safeExpected ) otherwise "...", 

      addendumEvalActual = try @ValueToText ( safeActual ) otherwise "...", 

      fact = [
        Result  = result & " " & addendumEvalAttempt, 
        Notes   = _subject, 

        Details = " (" & addendumEvalExpected & resultOp & addendumEvalActual & ")"

      ]

    ]
[fact]
;

  Facts = ( _subject as text, _predicates as list ) =>
    List.Transform ( _predicates, each Fact ( _subject, _{0}, _{1} ) )
;

  Facts.Summarize = ( _facts as list ) as table =>
    [
      Fact.CountSuccesses = ( count, i ) =>
        [
          result = try i[Result], 
          sum = 
            if
 result[HasError] or not Text.StartsWith ( result[Value], "Success" ) then
              count

            else

              count + 1

        ]
[sum]
, 

      passed = List.Accumulate ( _facts, 0, Fact.CountSuccesses ), 

      total = List.Count ( _facts ), 

      format = if passed = total then "All #{0} Passed !!! ✓" else "#{0} Passed ☺ #{1} Failed ☹", 

      result = if passed = total then "Success" else "⛔", 

      rate = Number.IntegerDivide ( 100 * passed, total ), 

      header = [
        Result  = result, 
        Notes   = Text.Format ( format, { passed, total - passed } ), 

        Details = Text.Format ( "#{0}% success rate", { rate } )

      ]
, 

      report = Table.FromRecords ( List.Combine ( { { header }, _facts } ) )

    ]
[report]
;

  ValueToText = ( value, optional depth ) =>
    let
      List.TransformAndCombine = ( list, transform, separator ) =>
        Text.Combine ( List.Transform ( list, transform ), separator )
, 

      Serialize.Binary = ( x ) => "#binary(" & Serialize ( Binary.ToList ( x ) ) & ") ", 

      Serialize.Function = ( x ) =>
        _serialize_function_param_type (
          Type.FunctionParameters ( Value.Type ( x ) ), 
          Type.FunctionRequiredParameters ( Value.Type ( x ) )

        )

          &
 " as "

          &
 _serialize_function_return_type ( Value.Type ( x ) )

          &
 " => (...) "
, 

      Serialize.List = ( x ) => "{" & List.TransformAndCombine ( x, Serialize, ", " ) & "} ", 

      Serialize.Record = ( x ) =>
        "[ "
          &
 List.TransformAndCombine (
            Record.FieldNames ( x ), 
            ( item ) =>
              Serialize.Identifier ( item ) & " = " & Serialize ( Record.Field ( x, item ) )
, 

            ", "

          )

          &
 " ] "
, 

      Serialize.Table = ( x ) =>
        "#table( type "
          &
 _serialize_table_type ( Value.Type ( x ) )

          &
 ", "

          &
 Serialize ( Table.ToRows ( x ) )

          &
 ") "
, 

      // Quotes identifiers when required (e.g. #"My Field").
      Serialize.Identifier = Expression.Identifier, 

      // type values delegate to the typename serializer below.
      Serialize.Type = ( x ) => "type " & _serialize_typename ( x ), 

      _serialize_typename = ( x, optional funtype as logical ) => /* Optional parameter: Is this being used as part of a function signature? */
        let
          isFunctionType = ( x as type ) =>
            try if Type.FunctionReturn ( x ) is type then true else false otherwise false
, 

          isTableType = ( x as type ) =>
            try if Type.TableSchema ( x ) is table then true else false otherwise false
, 

          isRecordType = ( x as type ) =>
            try if Type.ClosedRecord ( x ) is type then true else false otherwise false
, 

          isListType = ( x as type ) =>
            try if Type.ListItem ( x ) is type then true else false otherwise false

        in

          if funtype = null and isTableType ( x ) then
            _serialize_table_type ( x )

          else
 if funtype = null and isListType ( x ) then
            "{ " & @_serialize_typename ( Type.ListItem ( x ) ) & " }"

          else
 if funtype = null and isFunctionType ( x ) then
            "function " & _serialize_function_type ( x )

          else
 if funtype = null and isRecordType ( x ) then
            _serialize_record_type ( x )

          else
 if x = type any then
            "any"

          else

            let
              base = Type.NonNullable ( x )

            in

              ( if Type.IsNullable ( x ) then "nullable " else "" )
                &
 (
                  if base = type anynonnull then
                    "anynonnull"

                  else
 if base = type binary then
                    "binary"

                  else
 if base = type date then
                    "date"

                  else
 if base = type datetime then
                    "datetime"

                  else
 if base = type datetimezone then
                    "datetimezone"

                  else
 if base = type duration then
                    "duration"

                  else
 if base = type logical then
                    "logical"

                  else
 if base = type none then
                    "none"

                  else
 if base = type null then
                    "null"

                  else
 if base = type number then
                    "number"

                  else
 if base = type text then
                    "text"

                  else
 if base = type time then
                    "time"

                  else
 if base = type type then
                    "type"

                  else
 /* Abstract types: */
                  if base = type function then
                    "function"

                  else
 if base = type table then
                    "table"

                  else
 if base = type record then
                    "record"

                  else
 if base = type list then
                    "list"

                  else

                    "any /*Actually unknown type*/"

                )
, 

      _serialize_table_type = ( x ) =>
        let
          schema = Type.TableSchema ( x )

        in

          "table "
            &
 (
              if Table.IsEmpty ( schema ) then
                ""

              else

                "["
                  &
 List.TransformAndCombine (
                    Table.ToRecords ( Table.Sort ( schema, "Position" ) ), 
                    each Serialize.Identifier ( _[Name] ) & " = " & _[Kind], 

                    ", "

                  )

                  &
 "] "

            )
, 

      _serialize_record_type = ( x ) =>
        let
          flds = Type.RecordFields ( x )

        in

          if Record.FieldCount ( flds ) = 0 then
            "record"

          else

            "["
              &
 List.TransformAndCombine (
                Record.FieldNames ( flds ), 
                ( item ) =>
                  Serialize.Identifier ( item )
                    &
 "="

                    &
 _serialize_typename ( Record.Field ( flds, item )[Type] )
, 

                ", "

              )

              &
 ( if Type.IsOpenRecord ( x ) then ", ..." else "" )

              &
 "]"
, 

      _serialize_function_type = ( x ) =>
        _serialize_function_param_type (
          Type.FunctionParameters ( x ), 
          Type.FunctionRequiredParameters ( x )

        )

          &
 " as "

          &
 _serialize_function_return_type ( x )
, 

      _serialize_function_param_type = ( t, n ) =>
        let
          funsig = Table.ToRecords (
            Table.TransformColumns (
              Table.AddIndexColumn ( Record.ToTable ( t ), "isOptional", 1 ), 
              { "isOptional", ( x ) => x > n }

            )

          )

        in

          "("
            &
 List.TransformAndCombine (
              funsig, 
              ( item ) =>  ( if item[isOptional] then "optional " else "" )
                &
 Serialize.Identifier ( item[Name] )

                &
 " as "

                &
 _serialize_typename ( item[Value], true )
, 

              ", "

            )

            &
 ")"
, 

      _serialize_function_return_type = ( x ) =>
        _serialize_typename ( Type.FunctionReturn ( x ), true )
, 

      Serialize = ( x ) as text =>
        if x is binary then
          try Serialize.Binary ( x ) otherwise "null /*serialize failed*/"

        else
 if x is date then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is datetime then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is datetimezone then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is duration then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is function then
          try Serialize.Function ( x ) otherwise "null /*serialize failed*/"

        else
 if x is list then
          try Serialize.List ( x ) otherwise "null /*serialize failed*/"

        else
 if x is logical then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is null then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is number then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is record then
          try Serialize.Record ( x ) otherwise "null /*serialize failed*/"

        else
 if x is table then
          try Serialize.Table ( x ) otherwise "null /*serialize failed*/"

        else
 if x is text then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is time then
          try Expression.Constant ( x ) otherwise "null /*serialize failed*/"

        else
 if x is type then
          try Serialize.Type ( x ) otherwise "null /*serialize failed*/"

        else

          "[#_unable_to_serialize_#]"

    in

      // Top-level guard: serialization must never raise into the test report.
      try Serialize ( value ) otherwise "<serialization failed>"
;